| hexsha (string, 40 chars) | size (int64, 3-1.03M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, 3-972 chars) | max_stars_repo_name (string, 6-130 chars) | max_stars_repo_head_hexsha (string, 40-78 chars) | max_stars_repo_licenses (list, 1-10 items) | max_stars_count (int64, 1-191k, nullable) | max_stars_repo_stars_event_min_datetime (string, 24 chars, nullable) | max_stars_repo_stars_event_max_datetime (string, 24 chars, nullable) | max_issues_repo_path (string, 3-972 chars) | max_issues_repo_name (string, 6-130 chars) | max_issues_repo_head_hexsha (string, 40-78 chars) | max_issues_repo_licenses (list, 1-10 items) | max_issues_count (int64, 1-116k, nullable) | max_issues_repo_issues_event_min_datetime (string, 24 chars, nullable) | max_issues_repo_issues_event_max_datetime (string, 24 chars, nullable) | max_forks_repo_path (string, 3-972 chars) | max_forks_repo_name (string, 6-130 chars) | max_forks_repo_head_hexsha (string, 40-78 chars) | max_forks_repo_licenses (list, 1-10 items) | max_forks_count (int64, 1-105k, nullable) | max_forks_repo_forks_event_min_datetime (string, 24 chars, nullable) | max_forks_repo_forks_event_max_datetime (string, 24 chars, nullable) | content (string, 3-1.03M chars) | avg_line_length (float64, 1.13-941k) | max_line_length (int64, 2-941k) | alphanum_fraction (float64, 0-1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| cb0b0b1ffbe16668a2110a056d87603ba9b8925d | 47,481 | py | Python | dnacentersdk/api/v2_1_1/wireless.py | oboehmer/dnacentersdk | 25c4e99900640deee91a56aa886874d9cb0ca960 | ["MIT"] | 32 | 2019-09-05T05:16:56.000Z | 2022-03-22T09:50:38.000Z | dnacentersdk/api/v2_1_1/wireless.py | oboehmer/dnacentersdk | 25c4e99900640deee91a56aa886874d9cb0ca960 | ["MIT"] | 35 | 2019-09-07T18:58:54.000Z | 2022-03-24T19:29:36.000Z | dnacentersdk/api/v2_1_1/wireless.py | oboehmer/dnacentersdk | 25c4e99900640deee91a56aa886874d9cb0ca960 | ["MIT"] | 18 | 2019-09-09T11:07:21.000Z | 2022-03-25T08:49:59.000Z |
# -*- coding: utf-8 -*-
"""Cisco DNA Center Wireless API wrapper.
Copyright (c) 2019-2021 Cisco Systems.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import *
from past.builtins import basestring
from ...restsession import RestSession
from ...utils import (
check_type,
dict_from_items_with_values,
apply_path_params,
dict_of_str,
)
class Wireless(object):
"""Cisco DNA Center Wireless API (version: 2.1.1).
Wraps the DNA Center Wireless
API and exposes the API as native Python
methods that return native Python objects.
"""
def __init__(self, session, object_factory, request_validator):
"""Initialize a new Wireless
object with the provided RestSession.
Args:
session(RestSession): The RESTful session object to be used for
API calls to the DNA Center service.
Raises:
TypeError: If the parameter types are incorrect.
"""
check_type(session, RestSession)
super(Wireless, self).__init__()
self._session = session
self._object_factory = object_factory
self._request_validator = request_validator
def retrieve_rf_profiles(self,
rf_profile_name=None,
headers=None,
**request_parameters):
"""Retrieve all RF profiles.
Args:
rf_profile_name(basestring): rf-profile-name query parameter.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(rf_profile_name, basestring)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
'rf-profile-name':
rf_profile_name,
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/rf-profile')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.get(endpoint_full_url, params=_params,
headers=_headers)
else:
json_data = self._session.get(endpoint_full_url, params=_params)
return self._object_factory('bpm_098cab9141c9a3fe_v2_1_1', json_data)
def create_and_provision_ssid(self,
enableFabric=None,
flexConnect=None,
managedAPLocations=None,
ssidDetails=None,
ssidType=None,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Creates SSID, updates the SSID to the corresponding site
profiles and provision it to the devices matching the
given sites.
Args:
enableFabric(boolean): enableFabric, property of the request body.
flexConnect(object): Flex Connect Applicable for non fabric profile, property of the request body.
managedAPLocations(list): Managed AP Locations (Enter entire Site(s) hierarchy), property of the request
body (list of strings).
ssidDetails(object): SsidDetails, property of the request body.
ssidType(string): SSID Type, property of the request body. Available values are 'Guest' and
'Enterprise'.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(dict): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, dict)
if headers is not None:
if '__persistbapioutput' in headers:
check_type(headers.get('__persistbapioutput'),
basestring, may_be_none=False)
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = {
'managedAPLocations':
managedAPLocations,
'ssidDetails':
ssidDetails,
'ssidType':
ssidType,
'enableFabric':
enableFabric,
'flexConnect':
flexConnect,
}
_payload.update(payload or {})
_payload = dict_from_items_with_values(_payload)
if active_validation:
self._request_validator('jsd_1eb72ad34e098990_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/business/ssid')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_1eb72ad34e098990_v2_1_1', json_data)
def delete_rf_profiles(self,
rf_profile_name,
headers=None,
**request_parameters):
"""Delete RF profile(s).
Args:
rf_profile_name(basestring): rf-profile-name path parameter.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(rf_profile_name, basestring,
may_be_none=False)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
'rf-profile-name': rf_profile_name,
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/rf-profile/${rf-profile-'
+ 'name}')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.delete(endpoint_full_url, params=_params,
headers=_headers)
else:
json_data = self._session.delete(endpoint_full_url, params=_params)
return self._object_factory('bpm_28b24a744a9994be_v2_1_1', json_data)
def create_wireless_profile(self,
profileDetails=None,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Creates Wireless Network Profile on DNAC and associates sites
and SSIDs to it.
Args:
profileDetails(object): Profile Details, property of the request body.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(dict): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, dict)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = {
'profileDetails':
profileDetails,
}
_payload.update(payload or {})
_payload = dict_from_items_with_values(_payload)
if active_validation:
self._request_validator('jsd_709769624bf988d5_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/profile')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_709769624bf988d5_v2_1_1', json_data)
def provision_update(self,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Updates wireless provisioning.
Args:
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(list): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, list)
if headers is not None:
if '__persistbapioutput' in headers:
check_type(headers.get('__persistbapioutput'),
basestring, may_be_none=False)
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = payload or []
if active_validation:
self._request_validator('jsd_87a5ab044139862d_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/provision')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.put(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.put(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_87a5ab044139862d_v2_1_1', json_data)
def create_enterprise_ssid(self,
enableBroadcastSSID=None,
enableFastLane=None,
enableMACFiltering=None,
fastTransition=None,
name=None,
passphrase=None,
radioPolicy=None,
securityLevel=None,
trafficType=None,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Creates enterprise SSID.
Args:
enableBroadcastSSID(boolean): enableBroadcastSSID, property of the request body.
enableFastLane(boolean): enableFastLane, property of the request body.
enableMACFiltering(boolean): enableMACFiltering, property of the request body.
fastTransition(string): Fast Transition, property of the request body. Available values are 'Adaptive',
'Enable' and 'Disable'.
name(string): Enter SSID Name, property of the request body. Constraints: maxLength set to 32.
passphrase(string): Pass Phrase (Only applicable for SSID with PERSONAL security level), property of the
request body. Constraints: maxLength set to 63 and minLength set to 8.
radioPolicy(string): Radio Policy, property of the request body. Available values are 'Dual band
operation (2.4GHz and 5GHz)', 'Dual band operation with band select', '5GHz only' and
'2.4GHz only'.
securityLevel(string): Security Level, property of the request body. Available values are
'WPA2_ENTERPRISE', 'WPA2_PERSONAL' and 'OPEN'.
trafficType(string): Traffic Type, property of the request body. Available values are 'voicedata' and
'data'.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(dict): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, dict)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = {
'name':
name,
'securityLevel':
securityLevel,
'passphrase':
passphrase,
'enableFastLane':
enableFastLane,
'enableMACFiltering':
enableMACFiltering,
'trafficType':
trafficType,
'radioPolicy':
radioPolicy,
'enableBroadcastSSID':
enableBroadcastSSID,
'fastTransition':
fastTransition,
}
_payload.update(payload or {})
_payload = dict_from_items_with_values(_payload)
if active_validation:
self._request_validator('jsd_8a96fb954d09a349_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/enterprise-ssid')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_8a96fb954d09a349_v2_1_1', json_data)
def get_wireless_profile(self,
profile_name=None,
headers=None,
**request_parameters):
"""Gets either one or all the wireless network profiles if no name
is provided for network-profile.
Args:
profile_name(basestring): profileName query parameter.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
list: JSON response. A list of MyDict objects.
Access the object's properties by using the dot notation
or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(profile_name, basestring)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
'profileName':
profile_name,
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/profile')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.get(endpoint_full_url, params=_params,
headers=_headers)
else:
json_data = self._session.get(endpoint_full_url, params=_params)
return self._object_factory('bpm_b3a1c8804c8b9b8b_v2_1_1', json_data)
def create_or_update_rf_profile(self,
channelWidth=None,
defaultRfProfile=None,
enableBrownField=None,
enableCustom=None,
enableRadioTypeA=None,
enableRadioTypeB=None,
name=None,
radioTypeAProperties=None,
radioTypeBProperties=None,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Create or Update RF profile.
Args:
channelWidth(string): Channel Width, property of the request body.
defaultRfProfile(boolean): defaultRfProfile, property of the request body.
enableBrownField(boolean): enableBrownField, property of the request body.
enableCustom(boolean): enableCustom, property of the request body.
enableRadioTypeA(boolean): enableRadioTypeA, property of the request body.
enableRadioTypeB(boolean): enableRadioTypeB, property of the request body.
name(string): Name, property of the request body.
radioTypeAProperties(object): Radio Type AProperties, property of the request body.
radioTypeBProperties(object): Radio Type BProperties, property of the request body.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(dict): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, dict)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = {
'name':
name,
'defaultRfProfile':
defaultRfProfile,
'enableRadioTypeA':
enableRadioTypeA,
'enableRadioTypeB':
enableRadioTypeB,
'channelWidth':
channelWidth,
'enableCustom':
enableCustom,
'enableBrownField':
enableBrownField,
'radioTypeAProperties':
radioTypeAProperties,
'radioTypeBProperties':
radioTypeBProperties,
}
_payload.update(payload or {})
_payload = dict_from_items_with_values(_payload)
if active_validation:
self._request_validator('jsd_b78329674878b815_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/rf-profile')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_b78329674878b815_v2_1_1', json_data)
def delete_enterprise_ssid(self,
ssid_name,
headers=None,
**request_parameters):
"""Deletes given enterprise SSID.
Args:
ssid_name(basestring): Enter the SSID name to be deleted.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(ssid_name, basestring,
may_be_none=False)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
'ssidName': ssid_name,
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/enterprise-ssid/${ssidName}')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.delete(endpoint_full_url, params=_params,
headers=_headers)
else:
json_data = self._session.delete(endpoint_full_url, params=_params)
return self._object_factory('bpm_c7a6592b4b98a369_v2_1_1', json_data)
def provision(self,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Provision wireless devices.
Args:
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(list): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, list)
if headers is not None:
if '__persistbapioutput' in headers:
check_type(headers.get('__persistbapioutput'),
basestring, may_be_none=False)
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = payload or []
if active_validation:
self._request_validator('jsd_d09b08a3447aa3b9_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/provision')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_d09b08a3447aa3b9_v2_1_1', json_data)
def get_enterprise_ssid(self,
ssid_name=None,
headers=None,
**request_parameters):
"""Gets either one or all the enterprise SSID.
Args:
ssid_name(basestring): Enter the enterprise SSID name that needs to be retrieved. If not entered, all
the enterprise SSIDs will be retrieved.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
list: JSON response. A list of MyDict objects.
Access the object's properties by using the dot notation
or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(ssid_name, basestring)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
'ssidName':
ssid_name,
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/enterprise-ssid')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.get(endpoint_full_url, params=_params,
headers=_headers)
else:
json_data = self._session.get(endpoint_full_url, params=_params)
return self._object_factory('bpm_cca519ba45ebb423_v2_1_1', json_data)
def ap_provision(self,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Provision wireless Access points.
Args:
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(list): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, list)
if headers is not None:
if '__persistbapioutput' in headers:
check_type(headers.get('__persistbapioutput'),
basestring)
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = payload or []
if active_validation:
self._request_validator('jsd_e9b99b2248c88014_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/ap-provision')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_e9b99b2248c88014_v2_1_1', json_data)
def ap_provision_and_re_provision(self,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Access Point Provision and ReProvision .
Args:
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(list): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, list)
if headers is not None:
if '__persistbapioutput' in headers:
check_type(headers.get('__persistbapioutput'),
basestring)
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = payload or []
if active_validation:
self._request_validator('jsd_d89719b847aaa9c4_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/ap-provision')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.post(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_d89719b847aaa9c4_v2_1_1', json_data)
def update_wireless_profile(self,
profileDetails=None,
headers=None,
payload=None,
active_validation=True,
**request_parameters):
"""Updates the wireless Network Profile with updated details
provided. All sites to be present in the network profile
should be provided.
Args:
profileDetails(object): Profile Details, property of the request body.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
payload(dict): A JSON serializable Python object to send in the
body of the Request.
active_validation(bool): Enable/Disable payload validation.
Defaults to True.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(payload, dict)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
}
_payload = {
'profileDetails':
profileDetails,
}
_payload.update(payload or {})
_payload = dict_from_items_with_values(_payload)
if active_validation:
self._request_validator('jsd_cfbd3870405aad55_v2_1_1')\
.validate(_payload)
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless/profile')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.put(endpoint_full_url, params=_params,
json=_payload,
headers=_headers)
else:
json_data = self._session.put(endpoint_full_url, params=_params,
json=_payload)
return self._object_factory('bpm_cfbd3870405aad55_v2_1_1', json_data)
def delete_wireless_profile(self,
wireless_profile_name,
headers=None,
**request_parameters):
"""Delete the Wireless Profile from DNAC whose name is provided.
Args:
wireless_profile_name(basestring): wirelessProfileName path parameter.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(wireless_profile_name, basestring,
may_be_none=False)
if headers is not None:
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
'wirelessProfileName': wireless_profile_name,
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/wireless-'
+ 'profile/${wirelessProfileName}')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.delete(endpoint_full_url, params=_params,
headers=_headers)
else:
json_data = self._session.delete(endpoint_full_url, params=_params)
return self._object_factory('bpm_e39588a5494982c4_v2_1_1', json_data)
def delete_ssid_and_provision_it_to_devices(self,
managed_aplocations,
ssid_name,
headers=None,
**request_parameters):
"""Removes SSID or WLAN from the network profile, reprovision the
device(s) and deletes the SSID or WLAN from DNA Center.
Args:
ssid_name(basestring): ssidName path parameter.
managed_aplocations(basestring): managedAPLocations path parameter.
headers(dict): Dictionary of HTTP Headers to send with the Request
.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
MyDict: JSON response. Access the object's properties by using
the dot notation or the bracket notation.
Raises:
TypeError: If the parameter types are incorrect.
MalformedRequest: If the request body created is invalid.
ApiError: If the DNA Center cloud returns an error.
"""
check_type(headers, dict)
check_type(ssid_name, basestring,
may_be_none=False)
check_type(managed_aplocations, basestring,
may_be_none=False)
if headers is not None:
if '__persistbapioutput' in headers:
check_type(headers.get('__persistbapioutput'),
basestring, may_be_none=False)
if 'X-Auth-Token' in headers:
check_type(headers.get('X-Auth-Token'),
basestring, may_be_none=False)
_params = {
}
_params.update(request_parameters)
_params = dict_from_items_with_values(_params)
path_params = {
'ssidName': ssid_name,
'managedAPLocations': managed_aplocations,
}
with_custom_headers = False
_headers = self._session.headers or {}
if headers:
_headers.update(dict_of_str(headers))
with_custom_headers = True
e_url = ('/dna/intent/api/v1/business/ssid/${ssidName}/${managedAP'
+ 'Locations}')
endpoint_full_url = apply_path_params(e_url, path_params)
if with_custom_headers:
json_data = self._session.delete(endpoint_full_url, params=_params,
headers=_headers)
else:
json_data = self._session.delete(endpoint_full_url, params=_params)
return self._object_factory('bpm_fc9538fe43d9884d_v2_1_1', json_data)
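# Illustrative usage sketch (not part of the original module): the methods above
# are normally reached through the SDK's top-level client rather than by
# instantiating Wireless directly. The base URL and credentials below are
# assumptions for illustration only.
#
#     from dnacentersdk import DNACenterAPI
#
#     api = DNACenterAPI(base_url='https://dnac.example.com',
#                        username='admin', password='secret',
#                        version='2.1.1', verify=False)
#     # GET /dna/intent/api/v1/enterprise-ssid (all SSIDs when no name is given)
#     ssids = api.wireless.get_enterprise_ssid()
#     # GET /dna/intent/api/v1/wireless/profile?profileName=<name>
#     profile = api.wireless.get_wireless_profile(profile_name='Campus-Profile')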
| 39.766332 | 116 | 0.573429 |
| ecef86831b53ac98803d10967d4fb4d16d3304ee | 1,038 | py | Python | examples/libtest/TupleTest.py | allbuttonspressed/pyjs | c726fdead530eb63ee4763ae15daaa58d84cd58f | ["ECL-2.0", "Apache-2.0"] | null | null | null | examples/libtest/TupleTest.py | allbuttonspressed/pyjs | c726fdead530eb63ee4763ae15daaa58d84cd58f | ["ECL-2.0", "Apache-2.0"] | null | null | null | examples/libtest/TupleTest.py | allbuttonspressed/pyjs | c726fdead530eb63ee4763ae15daaa58d84cd58f | ["ECL-2.0", "Apache-2.0"] | 1 | 2019-11-18T14:17:59.000Z | 2019-11-18T14:17:59.000Z |
from UnitTest import UnitTest
class TupleTest(UnitTest):
def testGetItem(self):
t = (1, 2)
self.assertEqual(t[True], 2)
self.assertEqual(t[False], 1)
def testContains(self):
value = (0, 1, 2, 3, 4)
self.assertTrue(1 in value)
self.assertFalse(10 in value)
def testTupleAdd(self):
t1 = (1,2)
t2 = (3,4)
added = t1 + t2
self.assertTrue(added == (1,2,3,4), "t1 + t2")
t1 += t2
self.assertTrue(t1 == (1,2,3,4), "t1 += t2")
def testIter2(self):
i = 0
for item in (0,1,2,3):
self.assertEqual(item, i)
i += 1
i = 0
for item in (0,1,2,3)[1:-1]:
i += item
self.assertEqual(i, 3)
def testIter(self):
t = (0,1,2,3)
i = 0
it = t.__iter__()
while True:
try:
item = it.next()
except StopIteration:
break
self.assertEqual(item, t[i])
i += 1
| 22.565217 | 54 | 0.454721 |
| 0a25fd5ed4ee0805fb1f3ae5a245c487ef2d672a | 18,697 | py | Python | pipeline_plugins/tests/variables/collections/sites/open/cc/test_var_cmdb_set_module_ip_selector.py | springborland/bk-sops | a9057672c10efb5f2414a805a30ead4092429c76 | ["Apache-2.0"] | null | null | null | pipeline_plugins/tests/variables/collections/sites/open/cc/test_var_cmdb_set_module_ip_selector.py | springborland/bk-sops | a9057672c10efb5f2414a805a30ead4092429c76 | ["Apache-2.0"] | null | null | null | pipeline_plugins/tests/variables/collections/sites/open/cc/test_var_cmdb_set_module_ip_selector.py | springborland/bk-sops | a9057672c10efb5f2414a805a30ead4092429c76 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from mock import MagicMock, patch
from django.test import TestCase
from pipeline_plugins.variables.collections.sites.open.cmdb.var_cmdb_set_module_ip_selector import SetModuleIpSelector
GET_CLIENT_BY_USER = "pipeline_plugins.variables.utils.get_client_by_user"
CC_GET_IPS_INFO_BY_STR = (
"pipeline_plugins.variables.collections.sites.open.cmdb." "var_cmdb_set_module_ip_selector.cc_get_ips_info_by_str"
)
CMDB_API_FUNC_PREFIX = "pipeline_plugins.variables.utils"
LIST_BIZ_HOSTS = "{}.list_biz_hosts".format(CMDB_API_FUNC_PREFIX)
FIND_MODULE_WITH_RELATION = "{}.find_module_with_relation".format(CMDB_API_FUNC_PREFIX)
GET_SERVICE_TEMPLATE_LIST = "{}.get_service_template_list".format(CMDB_API_FUNC_PREFIX)
GET_SET_LIST = "{}.get_set_list".format(CMDB_API_FUNC_PREFIX)
GET_MODULE_LIST = "{}.get_module_list".format(CMDB_API_FUNC_PREFIX)
class MockClient(object):
def __init__(
self,
search_set_return=None,
list_biz_hosts_topo_return=None,
find_module_with_relation_return=None,
list_biz_hosts_return=None,
list_service_template_return=None,
find_module_batch_return=None,
cc_get_ips_info_by_str_return=None,
):
self.cc = MagicMock()
self.cc.list_biz_hosts_topo = MagicMock(return_value=list_biz_hosts_topo_return)
self.cc.find_module_with_relation = MagicMock(return_value=find_module_with_relation_return)
self.cc.list_biz_hosts = MagicMock(return_value=list_biz_hosts_return)
self.cc.search_set = MagicMock(return_value=search_set_return)
self.cc.list_service_template = MagicMock(return_value=list_service_template_return)
self.cc.find_module_batch = MagicMock(return_value=find_module_batch_return)
self.cc_get_ips_info_by_str = MagicMock(return_value=cc_get_ips_info_by_str_return)
mock_project_obj = MagicMock()
mock_project = MagicMock()
mock_project.objects.get = MagicMock(return_value=mock_project_obj)
SELECT_METHOD_SUC_CLIENT = MockClient(
list_biz_hosts_return={
"result": True,
"code": 0,
"message": "success",
"data": {
"count": 2,
"info": [
{
"bk_cloud_id": 0,
"bk_host_id": 1,
"bk_host_innerip": "192.168.15.18",
"bk_mac": "",
"bk_os_type": None,
},
{
"bk_cloud_id": 0,
"bk_host_id": 2,
"bk_host_innerip": "192.168.15.4",
"bk_mac": "",
"bk_os_type": None,
},
],
},
},
list_service_template_return={
"result": True,
"code": 0,
"message": "success",
"permission": None,
"data": {"count": 2, "info": [{"id": 51, "name": "test3"}, {"id": 50, "name": "test2"}]},
},
search_set_return={
"result": True,
"code": 0,
"message": "",
"data": {
"count": 1,
"info": [
{"default": 1, "bk_set_id": 30, "bk_set_name": "空闲机"},
{"default": 0, "bk_set_id": 31, "bk_set_name": "集群1"},
{"default": 0, "bk_set_id": 32, "bk_set_name": "集群2"},
{"default": 0, "bk_set_id": 33, "bk_set_name": "集群3"},
{"default": 0, "bk_set_id": 34, "bk_set_name": "集群4"},
{"default": 0, "bk_set_id": 38, "bk_set_name": "集群5"},
{"default": 0, "bk_set_id": 39, "bk_set_name": "集群6"},
],
},
},
find_module_with_relation_return={
"result": True,
"code": 0,
"message": "success",
"data": {"count": 2, "info": [{"bk_module_id": 60}, {"bk_module_id": 61}]},
},
)
MANUAL_METHOD_SUC_CLIENT = MockClient(
list_biz_hosts_return={
"result": True,
"code": 0,
"message": "success",
"data": {
"count": 2,
"info": [
{
"bk_cloud_id": 0,
"bk_host_id": 1,
"bk_host_innerip": "192.168.15.18",
"bk_mac": "",
"bk_os_type": None,
},
{
"bk_cloud_id": 0,
"bk_host_id": 2,
"bk_host_innerip": "192.168.15.4",
"bk_mac": "",
"bk_os_type": None,
},
],
},
},
list_service_template_return={
"result": True,
"code": 0,
"message": "success",
"permission": None,
"data": {"count": 2, "info": [{"id": 51, "name": "test3"}, {"id": 50, "name": "test2"}]},
},
search_set_return={
"result": True,
"code": 0,
"message": "",
"data": {
"count": 1,
"info": [
{"default": 1, "bk_set_id": 30, "bk_set_name": "空闲机"},
{"default": 0, "bk_set_id": 31, "bk_set_name": "集群1"},
{"default": 0, "bk_set_id": 32, "bk_set_name": "集群2"},
{"default": 0, "bk_set_id": 33, "bk_set_name": "集群3"},
{"default": 0, "bk_set_id": 34, "bk_set_name": "集群4"},
{"default": 0, "bk_set_id": 38, "bk_set_name": "集群5"},
{"default": 0, "bk_set_id": 39, "bk_set_name": "集群6"},
],
},
},
find_module_with_relation_return={
"result": True,
"code": 0,
"message": "success",
"data": {"count": 2, "info": [{"bk_module_id": 60}, {"bk_module_id": 61}]},
},
)
CUSTOM_METHOD_SUC_CLIENT = MockClient(
list_biz_hosts_return={
"result": True,
"code": 0,
"message": "success",
"data": {
"count": 2,
"info": [
{
"bk_cloud_id": 0,
"bk_host_id": 1,
"bk_host_innerip": "192.168.15.18",
"bk_mac": "",
"bk_os_type": None,
},
{
"bk_cloud_id": 0,
"bk_host_id": 2,
"bk_host_innerip": "192.168.15.4",
"bk_mac": "",
"bk_os_type": None,
},
],
},
},
list_service_template_return={
"result": True,
"code": 0,
"message": "success",
"permission": None,
"data": {"count": 2, "info": [{"id": 51, "name": "test3"}, {"id": 50, "name": "test2"}]},
},
search_set_return={
"result": True,
"code": 0,
"message": "",
"data": {
"count": 1,
"info": [
{"default": 1, "bk_set_id": 30, "bk_set_name": "空闲机"},
{"default": 0, "bk_set_id": 31, "bk_set_name": "集群1"},
{"default": 0, "bk_set_id": 32, "bk_set_name": "集群2"},
{"default": 0, "bk_set_id": 33, "bk_set_name": "集群3"},
{"default": 0, "bk_set_id": 34, "bk_set_name": "集群4"},
{"default": 0, "bk_set_id": 38, "bk_set_name": "集群5"},
{"default": 0, "bk_set_id": 39, "bk_set_name": "集群6"},
],
},
},
find_module_with_relation_return={
"result": True,
"code": 0,
"message": "success",
"data": {"count": 2, "info": [{"bk_module_id": 60}, {"bk_module_id": 61}]},
},
cc_get_ips_info_by_str_return={"result": True, "code": 0, "message": "success", "data": {}},
)
SELECT_METHOD_FAIL_CLIENT = MockClient(
list_biz_hosts_return={"result": False, "code": 0, "message": "success", "data": {}},
list_service_template_return={
"result": True,
"code": 0,
"message": "success",
"permission": None,
"data": {"count": 2, "info": [{"id": 51, "name": "test3"}, {"id": 50, "name": "test2"}]},
},
search_set_return={
"result": True,
"code": 0,
"message": "",
"data": {
"count": 1,
"info": [
{"default": 1, "bk_set_id": 30, "bk_set_name": "空闲机"},
{"default": 0, "bk_set_id": 32, "bk_set_name": "集群2"},
{"default": 0, "bk_set_id": 33, "bk_set_name": "集群3"},
{"default": 0, "bk_set_id": 34, "bk_set_name": "集群4"},
{"default": 0, "bk_set_id": 38, "bk_set_name": "集群5"},
{"default": 0, "bk_set_id": 39, "bk_set_name": "集群6"},
],
},
},
find_module_with_relation_return={
"result": True,
"code": 0,
"message": "success",
"data": {"count": 2, "info": [{"bk_module_id": 60}, {"bk_module_id": 61}]},
},
)
MANUAL_METHOD_FAIL_CLIENT = MockClient(
list_biz_hosts_return={"result": False, "code": 0, "message": "success", "data": {}},
list_service_template_return={
"result": True,
"code": 0,
"message": "success",
"permission": None,
"data": {"count": 2, "info": [{"id": 51, "name": "test3"}, {"id": 50, "name": "test2"}]},
},
search_set_return={
"result": True,
"code": 0,
"message": "",
"data": {
"count": 1,
"info": [
{"default": 1, "bk_set_id": 30, "bk_set_name": "空闲机"},
{"default": 0, "bk_set_id": 32, "bk_set_name": "集群2"},
{"default": 0, "bk_set_id": 33, "bk_set_name": "集群3"},
{"default": 0, "bk_set_id": 34, "bk_set_name": "集群4"},
{"default": 0, "bk_set_id": 38, "bk_set_name": "集群5"},
{"default": 0, "bk_set_id": 39, "bk_set_name": "集群6"},
],
},
},
find_module_with_relation_return={
"result": True,
"code": 0,
"message": "success",
"data": {"count": 2, "info": [{"bk_module_id": 60}, {"bk_module_id": 61}]},
},
)
CUSTOM_METHOD_FAIL_CLIENT = MockClient(
list_biz_hosts_return={"result": False, "code": 0, "message": "success", "data": {}},
list_service_template_return={
"result": True,
"code": 0,
"message": "success",
"permission": None,
"data": {"count": 2, "info": [{"id": 51, "name": "test3"}, {"id": 50, "name": "test2"}]},
},
search_set_return={
"result": True,
"code": 0,
"message": "",
"data": {
"count": 1,
"info": [
{"default": 1, "bk_set_id": 30, "bk_set_name": "空闲机"},
{"default": 0, "bk_set_id": 32, "bk_set_name": "集群2"},
{"default": 0, "bk_set_id": 33, "bk_set_name": "集群3"},
{"default": 0, "bk_set_id": 34, "bk_set_name": "集群4"},
{"default": 0, "bk_set_id": 38, "bk_set_name": "集群5"},
{"default": 0, "bk_set_id": 39, "bk_set_name": "集群6"},
],
},
},
find_module_with_relation_return={
"result": True,
"code": 0,
"message": "success",
"data": {"count": 2, "info": [{"bk_module_id": 60}, {"bk_module_id": 61}]},
},
)
IP_SELECTOR_SELECT_METHOD_SUC_VALUE = {
"var_ip_method": "select",
"var_ip_custom_value": "",
"var_ip_select_value": {"var_set": ["空闲机", "集群1"], "var_module": ["db"], "var_module_name": "ip"},
"var_ip_manual_value": {"var_manual_set": "", "var_manual_module": "", "var_module_name": ""},
"var_filter_set": "集群1,集群2",
"var_filter_module": "ls",
}
IP_SELECTOR_SELECT_METHOD_FAIL_VALUE = {
"var_ip_method": "select",
"var_ip_custom_value": "",
"var_ip_select_value": {"var_set": ["空闲机", "集群1"], "var_module": ["db"], "var_module_name": "ip"},
"var_ip_manual_value": {"var_manual_set": "", "var_manual_module": "", "var_module_name": ""},
"var_filter_set": "集群1,集群2",
"var_filter_module": "ls",
}
IP_SELECTOR_MANUAL_METHOD_SUC_VALUE = {
"var_ip_method": "manual",
"var_ip_custom_value": "",
"var_ip_select_value": {"var_set": ["空闲机", "集群1"], "var_module": ["db"], "var_module_name": "ip"},
"var_ip_manual_value": {"var_manual_set": "空闲机,集群1", "var_manual_module": "all,db", "var_module_name": ""},
"var_filter_set": "集群1,集群2",
"var_filter_module": "ls",
}
IP_SELECTOR_MANUAL_METHOD_FAIL_VALUE = {
"var_ip_method": "manual",
"var_ip_custom_value": "",
"var_ip_select_value": {"var_set": ["空闲机", "集群1"], "var_module": ["db"], "var_module_name": "ip"},
"var_ip_manual_value": {"var_manual_set": "空闲机,集群1", "var_manual_module": "all,db", "var_module_name": ""},
"var_filter_set": "集群1,集群2",
"var_filter_module": "ls",
}
IP_SELECTOR_CUSTOM_METHOD_SUC_VALUE = {
"var_ip_method": "custom",
"var_ip_custom_value": "192.168.15.18,192.168.15.4",
"var_ip_select_value": {"var_set": [], "var_module": [], "var_module_name": "ip"},
"var_ip_manual_value": {"var_manual_set": "", "var_manual_module": "", "var_module_name": ""},
"var_filter_set": "集群1,集群2",
"var_filter_module": "ls",
}
IP_SELECTOR_CUSTOM_METHOD_FAIL_VALUE = {
"var_ip_method": "custom",
"var_ip_custom_value": "192.168.15.18,192.168.15.4",
"var_ip_select_value": {"var_set": [], "var_module": [], "var_module_name": "ip"},
"var_ip_manual_value": {"var_manual_set": "", "var_manual_module": "", "var_module_name": ""},
"var_filter_set": "集群1,集群2",
"var_filter_module": "ls",
}
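# Note on the fixtures above: each *_SUC_VALUE / *_FAIL_VALUE pair carries the same
# selector input for its ip method ("select", "manual" or "custom"); the success and
# failure scenarios are distinguished by which MockClient gets patched in below. The
# *_FAIL_CLIENT mocks return {"result": False, ...} from cc.list_biz_hosts, and the
# failure test cases then expect SetModuleIpSelector.get_value() to resolve to an
# empty string.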
class VarCmdbSetModuleIpSelectorTestCase(TestCase):
def setUp(self):
self.supplier_account = "supplier_account_token"
self.project_patcher = patch(
"pipeline_plugins.variables.collections.sites.open.cmdb.var_cmdb_set_module_ip_selector.Project",
mock_project,
)
self.get_business_host_return = [
{"bk_host_innerip": "1.1.1.1", "bk_cloud_id": 1, "bk_attr": 1},
{"bk_host_innerip": "1.1.1.2", "bk_cloud_id": 2, "bk_attr": 2},
{"bk_host_innerip": "1.1.1.3", "bk_attr": 3},
]
self.bk_biz_id = 1
mock_project_obj.bk_biz_id = self.bk_biz_id
self.supplier_account_for_project_patcher = patch(
"pipeline_plugins.variables.collections.sites.open.cmdb.var_cmdb_set_module_ip_selector."
"supplier_account_for_project",
MagicMock(return_value=self.supplier_account),
)
self.pipeline_data = {"executor": "admin", "biz_cc_id": 123, "project_id": 1}
self.project_patcher.start()
self.supplier_account_for_project_patcher.start()
self.select_method_success_return = "192.168.15.18,192.168.15.4"
self.select_method_get_ip_fail_return = ""
self.manual_method_success_return = "192.168.15.18,192.168.15.4"
self.manual_method_fail_return = ""
self.custom_method_success_return = "192.168.15.18,192.168.15.4"
self.custom_method_fail_return = ""
def tearDown(self):
self.project_patcher.stop()
self.supplier_account_for_project_patcher.stop()
@patch(GET_CLIENT_BY_USER, return_value=SELECT_METHOD_SUC_CLIENT)
def test_select_method_success_case(self, mock_get_client_by_user_return):
set_module_ip_selector = SetModuleIpSelector(
pipeline_data=self.pipeline_data,
value=IP_SELECTOR_SELECT_METHOD_SUC_VALUE,
name="test_select_method_success_case",
context={},
)
self.assertEqual(self.select_method_success_return, set_module_ip_selector.get_value())
@patch(GET_CLIENT_BY_USER, return_value=SELECT_METHOD_FAIL_CLIENT)
def test_select_method_get_ip_fail_case(self, mock_get_client_by_user_return):
set_module_ip_selector = SetModuleIpSelector(
pipeline_data=self.pipeline_data,
value=IP_SELECTOR_SELECT_METHOD_FAIL_VALUE,
name="test_select_method_get_ip_fail_case",
context={},
)
self.assertEqual(self.select_method_get_ip_fail_return, set_module_ip_selector.get_value())
@patch(GET_CLIENT_BY_USER, return_value=MANUAL_METHOD_SUC_CLIENT)
def test_manual_method_success_case(self, mock_get_client_by_user_return):
set_module_ip_selector = SetModuleIpSelector(
pipeline_data=self.pipeline_data,
value=IP_SELECTOR_MANUAL_METHOD_SUC_VALUE,
name="test_manual_method_success_case",
context={},
)
self.assertEqual(self.manual_method_success_return, set_module_ip_selector.get_value())
@patch(GET_CLIENT_BY_USER, return_value=MANUAL_METHOD_FAIL_CLIENT)
def test_manual_method_fail_case(self, mock_get_client_by_user_return):
set_module_ip_selector = SetModuleIpSelector(
pipeline_data=self.pipeline_data,
value=IP_SELECTOR_MANUAL_METHOD_FAIL_VALUE,
name="test_manual_method_success_case",
context={},
)
self.assertEqual(self.manual_method_fail_return, set_module_ip_selector.get_value())
@patch(GET_CLIENT_BY_USER, return_value=CUSTOM_METHOD_SUC_CLIENT)
def test_custom_method_success_case(self, mock_get_client_by_user_return):
set_module_ip_selector = SetModuleIpSelector(
pipeline_data=self.pipeline_data,
value=IP_SELECTOR_MANUAL_METHOD_SUC_VALUE,
name="test_custom_method_success_case",
context={},
)
self.assertEqual(self.custom_method_success_return, set_module_ip_selector.get_value())
@patch(GET_CLIENT_BY_USER, return_value=CUSTOM_METHOD_FAIL_CLIENT)
def test_custom_method_fail_case(self, mock_get_client_by_user_return):
set_module_ip_selector = SetModuleIpSelector(
pipeline_data=self.pipeline_data,
value=IP_SELECTOR_CUSTOM_METHOD_FAIL_VALUE,
name="test_manual_method_success_case",
context={},
)
self.assertEqual(self.custom_method_fail_return, set_module_ip_selector.get_value())
| 39.865672 | 118 | 0.574959 |
| 51c86882b5eae136ce4b62d1cda4ef68dc3c564e | 7,816 | py | Python | TweetPinnaGraphs.py | IngoKl/TweetPinna | cf5908721486181accbb5a52455ff2e44ca385c9 | ["MIT"] | 4 | 2020-02-01T19:15:03.000Z | 2022-02-04T21:40:08.000Z | TweetPinnaGraphs.py | IngoKl/TweetPinna | cf5908721486181accbb5a52455ff2e44ca385c9 | ["MIT"] | 4 | 2017-02-12T11:50:18.000Z | 2020-04-12T15:10:32.000Z | TweetPinnaGraphs.py | IngoKl/TweetPinna | cf5908721486181accbb5a52455ff2e44ca385c9 | ["MIT"] | 1 | 2017-09-04T02:21:23.000Z | 2017-09-04T02:21:23.000Z |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""TweetPinna - Twitter Status Archiver - Dashboard.
TweetPinna streams Twitter statuses into a
MongoDB database based on given search terms.
It is also capable of retrieving a user's timeline.
This script generates graphs for the dashboard.
It is supposed to run as a cronjob.
e.g.
*/10 * * * * bash -c "cd /root/TweetPinna &&
python TweetPinnaGraphs.py TweetPinnaDefault.cfg"
Author: Ingo Kleiber <ingo@kleiber.me> (2017)
License: MIT
Version: 1.1.1
Status: Prototype
Example:
$ python TweetPinnaGraphs.py config.cfg
"""
import matplotlib
matplotlib.use('Agg')
from pymongo import MongoClient
from TweetPinna import check_config
from TweetPinna import Logger
import config
import matplotlib.pyplot as plt
import os
import pandas as pd
import sys
import time
import numpy as np
try:
if os.path.isfile(sys.argv[1]):
if check_config(sys.argv[1]):
cfg = config.Config(open(sys.argv[1], 'r'))
log = Logger(cfg)
else:
print ('Configuration appears to be faulty')
sys.exit(1)
else:
print ('Configuration file {} could not be found'.format(sys.argv[1]))
sys.exit(1)
except IndexError:
print ('Using default configuration')
cfg = config.Config(open('cfg/TweetPinnaDefault.cfg', 'r'))
log = Logger(cfg)
plt.style.use('ggplot')
log = Logger(cfg)
if not os.path.isdir('dashboard/static/img/results'):
os.makedirs('dashboard/static/img/results')
# MongoDB
mongo_client = MongoClient(cfg['mongo_path'])
mongo_db = mongo_client[cfg['mongo_db']]
mongo_coll_tweets = mongo_db[cfg['mongo_coll']]
def tweets_by_hour(n):
"""Generating a barchart showing the last n tweets by hour.
:param int n: the number of tweets to consider
"""
try:
tweet_timestamps = list(mongo_coll_tweets.find(
{'timestamp_ms': {'$exists': True}}, {'timestamp_ms': 1, '_id': 0}).sort([['_id', -1]]).limit(n))
tweet_datetimes = pd.to_datetime(
np.array(list(map(int, [d['timestamp_ms'] for d in tweet_timestamps]))), unit='ms')
df = pd.DataFrame(tweet_datetimes, columns=['date'])
df.set_index('date', drop=False, inplace=True)
grouped_df = df.groupby(pd.Grouper(freq='1h')).count()
grouped_df_average = grouped_df["date"].sum() / len(grouped_df)
tweets_by_hour = grouped_df.plot(
kind='bar', legend=False, color='#262626', rot=75)
tweets_by_hour.set_xlabel('Date', fontsize=12)
tweets_by_hour.set_ylabel('Nr. of Tweets', fontsize=12)
tweets_by_hour.set_title(
'Tweets by Hour\n({} Tweets, avg. {} Tweets/h)\n {}'.format(
n, round(grouped_df_average), time.strftime("%Y-%m-%d %H:%M:%S")),
position=(0.5, 1.05))
tweets_by_hour.get_figure().savefig(
'dashboard/static/img/results/tweets-by-hour.png',
bbox_inches='tight')
log.log_add(1, 'Graph tweets-by-hour.png created')
except Exception as e:
log.log_add(3, 'Graph tweets-by-hour.png could \
not be created ({})'.format(e))
return False
def tweets_by_day(n):
"""Generating a barchart showing the last n tweets by day.
:param int n: the number of tweets to consider
"""
try:
tweet_timestamps = list(mongo_coll_tweets.find(
{'timestamp_ms': {'$exists': True}}, {'timestamp_ms': 1, '_id': 0}).sort([['_id', -1]]).limit(n))
tweet_datetimes = pd.to_datetime(
np.array(list(map(int, [d['timestamp_ms'] for d in tweet_timestamps]))), unit='ms')
df = pd.DataFrame(tweet_datetimes, columns=['date'])
df.set_index('date', drop=False, inplace=True)
grouped_df = df.groupby(pd.Grouper(freq='1d')).count()
grouped_df_average = grouped_df["date"].sum() / len(grouped_df)
grouped_df['day'] = grouped_df.date.keys().strftime('%Y-%m-%d')
tweets_by_day = grouped_df.plot(
kind='bar', x='day', legend=False, color='#262626',
rot=75)
tweets_by_day.set_xlabel('Date', fontsize=12)
tweets_by_day.set_ylabel('Nr. of Tweets', fontsize=12)
tweets_by_day.set_title(
'Tweets by Day\n({} Tweets, avg. {} Tweets/day)\n {}'.
format(n, round(grouped_df_average), time.strftime("%Y-%m-%d %H:%M:%S")),
position=(0.5, 1.05))
tweets_by_day.get_figure().savefig(
'dashboard/static/img/results/tweets-by-day.png',
bbox_inches='tight')
log.log_add(1, 'Graph tweets-by-day.png created')
except Exception as e:
        log.log_add(3, 'Graph tweets-by-day.png could not be created ({})'.format(e))
return False
def tweets_over_time(n):
"""Generating a chart of the overall development of the collection.
:param int n: the number of tweets to consider
"""
try:
tweet_timestamps = list(mongo_coll_tweets.find(
{'timestamp_ms': {'$exists': True}}, {'timestamp_ms': 1, '_id': 0}).sort([['_id', -1]]).limit(n))
tweet_datetimes = pd.to_datetime(
np.array(list(map(int, [d['timestamp_ms'] for d in tweet_timestamps]))), unit='ms')
df = pd.DataFrame(tweet_datetimes, columns=['date'])
df.set_index('date', drop=False, inplace=True)
grouped_df = df.groupby(pd.Grouper(freq='1d')).count()
grouped_df_average = grouped_df["date"].sum() / len(grouped_df)
tweets_over_time = grouped_df.cumsum().plot(
kind='area', legend=False, color='#262626',
stacked='False', rot=75)
tweets_over_time.set_xlabel('Date', fontsize=12)
tweets_over_time.set_ylabel('Nr. of Additional Tweets', fontsize=12)
        tweets_over_time.set_title('Tweets over Time\n({} Tweets, avg. {} Tweets/day)\n {}'.
format(n, round(grouped_df_average), time.strftime("%Y-%m-%d %H:%M:%S")),
position=(0.5, 1.05))
tweets_over_time.get_figure().savefig(
'dashboard/static/img/results/tweets-over-time.png',
bbox_inches='tight')
log.log_add(1, 'Graph tweets-over-time.png created')
except Exception as e:
        log.log_add(3, 'Graph tweets-over-time.png could not be created ({})'.format(e))
return False
if __name__ == '__main__':
# Graphs
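    # Each graph below is regenerated only when its PNG is missing or older
    # than cfg['refresh_graphs'] minutes, so the cronjob stays cheap.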
# Tweets by Hour
if (os.path.isfile('dashboard/static/img/results/tweets-by-hour.png')):
if (os.path.getmtime
('dashboard/static/img/results/tweets-by-hour.png') <
(time.time() - cfg['refresh_graphs'] * 60)):
tweets_by_hour(cfg['tweets_by_hour_number'])
else:
tweets_by_hour(cfg['tweets_by_hour_number'])
# Tweets by Day
if (os.path.isfile('dashboard/static/img/results/tweets-by-day.png')):
if (os.path.getmtime
('dashboard/static/img/results/tweets-by-day.png') <
(time.time() - cfg['refresh_graphs'] * 60)):
tweets_by_day(cfg['tweets_by_day_number'])
else:
tweets_by_day(cfg['tweets_by_day_number'])
# Tweets over Time
if (os.path.isfile('dashboard/static/img/results/tweets-over-time.png')):
if (os.path.getmtime
('dashboard/static/img/results/tweets-over-time.png') <
(time.time() - cfg['refresh_graphs'] * 60)):
tweets_over_time(cfg['tweets_overall_limit'])
else:
tweets_over_time(cfg['tweets_overall_limit'])
| 38.313725 | 110 | 0.600947 |
9e75305ddf19625ed5e037d82ec8fff113412795 | 52,832 | py | Python |
ibis/backends/pyspark/compiler.py | LeeTZ/ibis | 165f78de8f4f0121ba2c601b5c9f89bc0f65a593 | ["Apache-2.0"] | null | null | null |
ibis/backends/pyspark/compiler.py | LeeTZ/ibis | 165f78de8f4f0121ba2c601b5c9f89bc0f65a593 | ["Apache-2.0"] | 1 | 2021-03-25T14:07:29.000Z | 2021-03-25T14:07:29.000Z |
ibis/backends/pyspark/compiler.py | LeeTZ/ibis | 165f78de8f4f0121ba2c601b5c9f89bc0f65a593 | ["Apache-2.0"] | null | null | null |
import collections
import enum
import functools
import operator
import numpy as np
import pyspark
import pyspark.sql.functions as F
from pyspark.sql import Window
from pyspark.sql.functions import PandasUDFType, pandas_udf
import ibis.common.exceptions as com
import ibis.expr.datatypes as dtypes
import ibis.expr.operations as ops
import ibis.expr.types as ir
import ibis.expr.types as types
from ibis import interval
from ibis.backends.pandas.execution import execute
from ibis.backends.spark.compiler import SparkContext, SparkDialect
from ibis.backends.spark.datatypes import (
ibis_array_dtype_to_spark_dtype,
ibis_dtype_to_spark_dtype,
spark_dtype,
)
from ibis.expr.timecontext import adjust_context
from ibis.util import coerce_to_dataframe, guid
from .operations import PySparkTable
from .timecontext import combine_time_context, filter_by_time_context
class PySparkContext(SparkContext):
pass
class AggregationContext(enum.Enum):
ENTIRE = 0
WINDOW = 1
GROUP = 2
class PySparkExprTranslator:
_registry = {}
context_class = PySparkContext
@classmethod
def compiles(cls, klass):
def decorator(f):
cls._registry[klass] = f
return f
return decorator
def translate(self, expr, scope, timecontext, **kwargs):
"""
Translate Ibis expression into a PySpark object.
All translated expressions are cached within scope. If an expression is
        found within scope, it's returned. Otherwise, it is translated and
cached for future reference.
:param expr: ibis expression
:param scope: dictionary mapping from operation to translated result
:param timecontext: time context associated with expr
:param kwargs: parameters passed as keyword args (e.g. window)
:return: translated PySpark DataFrame or Column object
"""
# The operation node type the typed expression wraps
op = expr.op()
result = scope.get_value(op, timecontext)
if result is not None:
return result
elif type(op) in self._registry:
formatter = self._registry[type(op)]
result = formatter(self, expr, scope, timecontext, **kwargs)
scope.set_value(op, timecontext, result)
return result
else:
raise com.OperationNotDefinedError(
'No translation rule for {}'.format(type(op))
)
class PySparkDialect(SparkDialect):
translator = PySparkExprTranslator
compiles = PySparkExprTranslator.compiles
@compiles(PySparkTable)
def compile_datasource(t, expr, scope, timecontext):
op = expr.op()
name, _, client = op.args
return filter_by_time_context(client._session.table(name), timecontext)
@compiles(ops.SQLQueryResult)
def compile_sql_query_result(t, expr, scope, timecontext, **kwargs):
op = expr.op()
query, _, client = op.args
return client._session.sql(query)
def _can_be_replaced_by_column_name(column_expr, table):
"""
Return whether the given column_expr can be replaced by its literal
name, which is True when column_expr and table[column_expr.get_name()]
    are semantically the same.
"""
# Each check below is necessary to distinguish a pure projection from
# other valid selections, such as a mutation that assigns a new column
# or changes the value of an existing column.
return (
isinstance(column_expr.op(), ops.TableColumn)
and column_expr.op().table == table
and column_expr.get_name() in table.schema()
and column_expr.op() == table[column_expr.get_name()].op()
)
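# Illustrative sketch (hypothetical table `t`): a plain `t['a']` appearing in a
# selection satisfies all the checks above and can be replaced by the name 'a',
# whereas a derived column such as `(t['a'] + 1).name('a')` or a column taken
# from a different table cannot.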
@compiles(ops.Selection)
def compile_selection(t, expr, scope, timecontext, **kwargs):
op = expr.op()
# In selection, there could be multiple children that point to the
# same root table. e.g. window with different sizes on a table.
# We need to get the 'combined' time range that is a superset of every
# time context among child nodes, and pass this as context to
# source table to get all data within time context loaded.
arg_timecontexts = [
adjust_context(node.op(), timecontext)
for node in op.selections
if timecontext
]
adjusted_timecontext = combine_time_context(arg_timecontexts)
# If this is a sort or filter node, op.selections is empty
# in this case, we use the original timecontext
if not adjusted_timecontext:
adjusted_timecontext = timecontext
src_table = t.translate(op.table, scope, adjusted_timecontext)
col_in_selection_order = []
col_to_drop = []
result_table = src_table
for selection in op.selections:
if isinstance(selection, types.TableExpr):
col_in_selection_order.extend(selection.columns)
elif isinstance(selection, types.DestructColumn):
struct_col = t.translate(selection, scope, adjusted_timecontext)
# assign struct col and drop it later
            # This is a workaround to ensure that the struct_col
# is only executed once
struct_col_name = f"destruct_col_{guid()}"
result_table = result_table.withColumn(struct_col_name, struct_col)
col_to_drop.append(struct_col_name)
cols = [
result_table[struct_col_name][name].alias(name)
for name in selection.type().names
]
col_in_selection_order.extend(cols)
elif isinstance(selection, (types.ColumnExpr, types.ScalarExpr)):
# If the selection is a straightforward projection of a table
# column from the root table itself (i.e. excluding mutations and
# renames), we can get the selection name directly.
if _can_be_replaced_by_column_name(selection, op.table):
col_in_selection_order.append(selection.get_name())
else:
col = t.translate(
selection, scope, adjusted_timecontext
).alias(selection.get_name())
col_in_selection_order.append(col)
else:
raise NotImplementedError(
f"Unrecoginized type in selections: {type(selection)}"
)
if col_in_selection_order:
result_table = result_table[col_in_selection_order]
if col_to_drop:
result_table = result_table.drop(*col_to_drop)
for predicate in op.predicates:
col = t.translate(predicate, scope, timecontext)
# Due to an upstream Spark issue (SPARK-33057) we cannot
# directly use filter with a window operation. The workaround
# here is to assign a temporary column for the filter predicate,
# do the filtering, and then drop the temporary column.
filter_column = f'predicate_{guid()}'
result_table = result_table.withColumn(filter_column, col)
result_table = result_table.filter(F.col(filter_column))
result_table = result_table.drop(filter_column)
if op.sort_keys:
sort_cols = [
t.translate(key, scope, timecontext) for key in op.sort_keys
]
result_table = result_table.sort(*sort_cols)
return filter_by_time_context(
result_table, timecontext, adjusted_timecontext
)
@compiles(ops.SortKey)
def compile_sort_key(t, expr, scope, timecontext, **kwargs):
op = expr.op()
col = t.translate(op.expr, scope, timecontext)
if op.ascending:
return col.asc()
else:
return col.desc()
@compiles(ops.TableColumn)
def compile_column(t, expr, scope, timecontext, **kwargs):
op = expr.op()
table = t.translate(op.table, scope, timecontext)
return table[op.name]
@compiles(ops.DistinctColumn)
def compile_distinct(t, expr, scope, timecontext, **kwargs):
op = expr.op()
root_table_expr = op.root_tables()[0].to_expr()
src_table = t.translate(root_table_expr, scope, timecontext)
src_column_name = op.arg.get_name()
return src_table.select(src_column_name).distinct()[src_column_name]
@compiles(ops.SelfReference)
def compile_self_reference(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.table, scope, timecontext)
@compiles(ops.Cast)
def compile_cast(t, expr, scope, timecontext, **kwargs):
op = expr.op()
if isinstance(op.to, dtypes.Interval):
if isinstance(op.arg.op(), ops.Literal):
return interval(op.arg.op().value, op.to.unit)
else:
raise com.UnsupportedArgumentError(
'Casting to intervals is only supported for literals '
'in the PySpark backend. {} not allowed.'.format(type(op.arg))
)
if isinstance(op.to, dtypes.Array):
cast_type = ibis_array_dtype_to_spark_dtype(op.to)
else:
cast_type = ibis_dtype_to_spark_dtype(op.to)
src_column = t.translate(op.arg, scope, timecontext)
return src_column.cast(cast_type)
@compiles(ops.Limit)
def compile_limit(t, expr, scope, timecontext, **kwargs):
op = expr.op()
if op.offset != 0:
raise com.UnsupportedArgumentError(
            'PySpark backend does not support non-zero offset for '
'limit operation. Got offset {}.'.format(op.offset)
)
df = t.translate(op.table, scope, timecontext)
return df.limit(op.n)
@compiles(ops.And)
def compile_and(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) & t.translate(
op.right, scope, timecontext
)
@compiles(ops.Or)
def compile_or(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) | t.translate(
op.right, scope, timecontext
)
@compiles(ops.Equals)
def compile_equals(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) == t.translate(
op.right, scope, timecontext
)
@compiles(ops.Not)
def compile_not(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return ~t.translate(op.arg, scope, timecontext)
@compiles(ops.NotEquals)
def compile_not_equals(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) != t.translate(
op.right, scope, timecontext
)
@compiles(ops.Greater)
def compile_greater(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) > t.translate(
op.right, scope, timecontext
)
@compiles(ops.GreaterEqual)
def compile_greater_equal(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) >= t.translate(
op.right, scope, timecontext
)
@compiles(ops.Less)
def compile_less(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) < t.translate(
op.right, scope, timecontext
)
@compiles(ops.LessEqual)
def compile_less_equal(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) <= t.translate(
op.right, scope, timecontext
)
@compiles(ops.Multiply)
def compile_multiply(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) * t.translate(
op.right, scope, timecontext
)
@compiles(ops.Subtract)
def compile_subtract(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return t.translate(op.left, scope, timecontext) - t.translate(
op.right, scope, timecontext
)
@compiles(ops.Literal)
def compile_literal(t, expr, scope, timecontext, raw=False, **kwargs):
""" If raw is True, don't wrap the result with F.lit()
"""
value = expr.op().value
dtype = expr.op().dtype
if raw:
return value
if isinstance(dtype, dtypes.Interval):
# execute returns a Timedelta and value is nanoseconds
return execute(expr).value
if isinstance(value, collections.abc.Set):
# Don't wrap set with F.lit
if isinstance(value, frozenset):
            # Spark doesn't like frozenset
return set(value)
else:
return value
elif isinstance(value, list):
return F.array(*[F.lit(v) for v in value])
elif isinstance(value, np.ndarray):
        # Unpack np.generic values using .item(); otherwise Spark
        # will not accept them
return F.array(*[F.lit(v.item()) for v in value])
else:
return F.lit(value)
def _compile_agg(t, agg_expr, scope, timecontext, *, context, **kwargs):
agg = t.translate(agg_expr, scope, timecontext, context=context)
if agg_expr.has_name():
return agg.alias(agg_expr.get_name())
return agg
@compiles(ops.Aggregation)
def compile_aggregation(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_table = t.translate(op.table, scope, timecontext)
if op.by:
context = AggregationContext.GROUP
aggs = [
_compile_agg(t, m, scope, timecontext, context=context)
for m in op.metrics
]
bys = [t.translate(b, scope, timecontext) for b in op.by]
return src_table.groupby(*bys).agg(*aggs)
else:
context = AggregationContext.ENTIRE
aggs = [
_compile_agg(t, m, scope, timecontext, context=context)
for m in op.metrics
]
return src_table.agg(*aggs)
@compiles(ops.Union)
def compile_union(t, expr, scope, timecontext, **kwargs):
op = expr.op()
result = t.translate(op.left, scope, timecontext).union(
t.translate(op.right, scope, timecontext)
)
return result.distinct() if op.distinct else result
@compiles(ops.Contains)
def compile_contains(t, expr, scope, timecontext, **kwargs):
op = expr.op()
col = t.translate(op.value, scope, timecontext)
return col.isin(t.translate(op.options, scope, timecontext))
@compiles(ops.NotContains)
def compile_not_contains(t, expr, scope, timecontext, **kwargs):
op = expr.op()
col = t.translate(op.value, scope, timecontext)
return ~(col.isin(t.translate(op.options, scope, timecontext)))
def compile_aggregator(
t, expr, scope, timecontext, *, fn, context=None, **kwargs
):
op = expr.op()
src_col = t.translate(op.arg, scope, timecontext)
if getattr(op, 'where', None) is not None:
condition = t.translate(op.where, scope, timecontext)
src_col = F.when(condition, src_col)
col = fn(src_col)
if context:
return col
else:
        # We are trying to compile an expr such as some_col.max()
# to a Spark expression.
# Here we get the root table df of that column and compile
# the expr to:
# df.select(max(some_col))
return t.translate(
expr.op().arg.op().table, scope, timecontext
).select(col)
@compiles(ops.GroupConcat)
def compile_group_concat(t, expr, scope, timecontext, context=None, **kwargs):
sep = expr.op().sep.op().value
def fn(col):
return F.concat_ws(sep, F.collect_list(col))
return compile_aggregator(
t, expr, scope, timecontext, fn=fn, context=context
)
@compiles(ops.Any)
def compile_any(t, expr, scope, timecontext, context=None, **kwargs):
return compile_aggregator(
t, expr, scope, timecontext, fn=F.max, context=context, **kwargs
)
@compiles(ops.NotAny)
def compile_notany(t, expr, scope, timecontext, *, context=None, **kwargs):
    # The code here is a little ugly because the translation is different
    # depending on the context.
    # When translating col.notany() (context is None), we return the dataframe,
    # so we need to negate the aggregator, i.e., df.select(~F.max(col)).
    # When translating col.notany().over(w), we need to negate the result
# after the window translation, i.e., ~(F.max(col).over(w))
if context is None:
def fn(col):
return ~(F.max(col))
return compile_aggregator(
t, expr, scope, timecontext, fn=fn, context=context, **kwargs
)
else:
return ~compile_any(
t, expr, scope, timecontext, context=context, **kwargs,
)
@compiles(ops.All)
def compile_all(t, expr, scope, timecontext, context=None, **kwargs):
return compile_aggregator(
t, expr, scope, timecontext, fn=F.min, context=context, **kwargs
)
@compiles(ops.NotAll)
def compile_notall(t, expr, scope, timecontext, *, context=None, **kwargs):
    # See the comments for ops.NotAny for the reasoning behind the if/else
if context is None:
def fn(col):
return ~(F.min(col))
return compile_aggregator(
t, expr, scope, timecontext, fn=fn, context=context, **kwargs
)
else:
return ~compile_all(
t, expr, scope, timecontext, context=context, **kwargs,
)
@compiles(ops.Count)
def compile_count(t, expr, scope, timecontext, context=None, **kwargs):
return compile_aggregator(
t, expr, scope, timecontext, fn=F.count, context=context, **kwargs
)
@compiles(ops.Max)
@compiles(ops.CumulativeMax)
def compile_max(t, expr, scope, timecontext, context=None, **kwargs):
return compile_aggregator(
t, expr, scope, timecontext, fn=F.max, context=context, **kwargs
)
@compiles(ops.Min)
@compiles(ops.CumulativeMin)
def compile_min(t, expr, scope, timecontext, context=None, **kwargs):
return compile_aggregator(
t, expr, scope, timecontext, fn=F.min, context=context, **kwargs
)
@compiles(ops.Mean)
@compiles(ops.CumulativeMean)
def compile_mean(t, expr, scope, timecontext, context=None, **kwargs):
return compile_aggregator(
t, expr, scope, timecontext, fn=F.mean, context=context, **kwargs
)
@compiles(ops.Sum)
@compiles(ops.CumulativeSum)
def compile_sum(t, expr, scope, timecontext, context=None, **kwargs):
return compile_aggregator(
t, expr, scope, timecontext, fn=F.sum, context=context, **kwargs
)
@compiles(ops.StandardDev)
def compile_std(t, expr, scope, timecontext, context=None, **kwargs):
how = expr.op().how
if how == 'sample':
fn = F.stddev_samp
elif how == 'pop':
fn = F.stddev_pop
else:
raise com.TranslationError(
"Unexpected 'how' in translation: {}".format(how)
)
return compile_aggregator(
t, expr, scope, timecontext, fn=fn, context=context
)
@compiles(ops.Variance)
def compile_variance(t, expr, scope, timecontext, context=None, **kwargs):
how = expr.op().how
if how == 'sample':
fn = F.var_samp
elif how == 'pop':
fn = F.var_pop
else:
raise com.TranslationError(
"Unexpected 'how' in translation: {}".format(how)
)
return compile_aggregator(
t, expr, scope, timecontext, fn=fn, context=context
)
@compiles(ops.Arbitrary)
def compile_arbitrary(t, expr, scope, timecontext, context=None, **kwargs):
how = expr.op().how
if how == 'first':
fn = functools.partial(F.first, ignorenulls=True)
elif how == 'last':
fn = functools.partial(F.last, ignorenulls=True)
else:
raise NotImplementedError("Does not support 'how': {}".format(how))
return compile_aggregator(
t, expr, scope, timecontext, fn=fn, context=context
)
@compiles(ops.Greatest)
def compile_greatest(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_columns = t.translate(op.arg, scope, timecontext)
if len(src_columns) == 1:
return src_columns[0]
else:
return F.greatest(*src_columns)
@compiles(ops.Least)
def compile_least(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_columns = t.translate(op.arg, scope, timecontext)
if len(src_columns) == 1:
return src_columns[0]
else:
return F.least(*src_columns)
@compiles(ops.Abs)
def compile_abs(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.abs(src_column)
@compiles(ops.Round)
def compile_round(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
scale = (
t.translate(op.digits, scope, timecontext, raw=True)
if op.digits is not None
else 0
)
rounded = F.round(src_column, scale=scale)
if scale == 0:
rounded = rounded.astype('long')
return rounded
@compiles(ops.Ceil)
def compile_ceil(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.ceil(src_column)
@compiles(ops.Floor)
def compile_floor(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.floor(src_column)
@compiles(ops.Exp)
def compile_exp(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.exp(src_column)
@compiles(ops.Sign)
def compile_sign(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.when(src_column == 0, F.lit(0.0)).otherwise(
F.when(src_column > 0, F.lit(1.0)).otherwise(-1.0)
)
@compiles(ops.Sqrt)
def compile_sqrt(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.sqrt(src_column)
@compiles(ops.Log)
def compile_log(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
# Spark log method only takes float
return F.log(
float(t.translate(op.base, scope, timecontext, raw=True)), src_column
)
@compiles(ops.Ln)
def compile_ln(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.log(src_column)
@compiles(ops.Log2)
def compile_log2(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.log2(src_column)
@compiles(ops.Log10)
def compile_log10(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.log10(src_column)
@compiles(ops.Modulus)
def compile_modulus(t, expr, scope, timecontext, **kwargs):
op = expr.op()
left = t.translate(op.left, scope, timecontext)
right = t.translate(op.right, scope, timecontext)
return left % right
@compiles(ops.Negate)
def compile_negate(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return -src_column
@compiles(ops.Add)
def compile_add(t, expr, scope, timecontext, **kwargs):
op = expr.op()
left = t.translate(op.left, scope, timecontext)
right = t.translate(op.right, scope, timecontext)
return left + right
@compiles(ops.Divide)
def compile_divide(t, expr, scope, timecontext, **kwargs):
op = expr.op()
left = t.translate(op.left, scope, timecontext)
right = t.translate(op.right, scope, timecontext)
return left / right
@compiles(ops.FloorDivide)
def compile_floor_divide(t, expr, scope, timecontext, **kwargs):
op = expr.op()
left = t.translate(op.left, scope, timecontext)
right = t.translate(op.right, scope, timecontext)
return F.floor(left / right)
@compiles(ops.Power)
def compile_power(t, expr, scope, timecontext, **kwargs):
op = expr.op()
left = t.translate(op.left, scope, timecontext)
right = t.translate(op.right, scope, timecontext)
return F.pow(left, right)
@compiles(ops.IsNan)
def compile_isnan(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.isnan(src_column)
@compiles(ops.IsInf)
def compile_isinf(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return (src_column == float('inf')) | (src_column == float('-inf'))
@compiles(ops.Uppercase)
def compile_uppercase(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.upper(src_column)
@compiles(ops.Lowercase)
def compile_lowercase(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.lower(src_column)
@compiles(ops.Reverse)
def compile_reverse(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.reverse(src_column)
@compiles(ops.Strip)
def compile_strip(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.trim(src_column)
@compiles(ops.LStrip)
def compile_lstrip(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.ltrim(src_column)
@compiles(ops.RStrip)
def compile_rstrip(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.rtrim(src_column)
@compiles(ops.Capitalize)
def compile_capitalize(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.initcap(src_column)
@compiles(ops.Substring)
def compile_substring(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
start = t.translate(op.start, scope, timecontext, raw=True) + 1
length = t.translate(op.length, scope, timecontext, raw=True)
if isinstance(start, pyspark.sql.Column) or isinstance(
length, pyspark.sql.Column
):
raise NotImplementedError(
"Specifiying Start and length with column expressions "
"are not supported."
)
return src_column.substr(start, length)
@compiles(ops.StringLength)
def compile_string_length(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.length(src_column)
@compiles(ops.StrRight)
def compile_str_right(t, expr, scope, timecontext, **kwargs):
op = expr.op()
@F.udf('string')
def str_right(s, nchars):
return s[-nchars:]
src_column = t.translate(op.arg, scope, timecontext)
nchars_column = t.translate(op.nchars, scope, timecontext)
return str_right(src_column, nchars_column)
@compiles(ops.Repeat)
def compile_repeat(t, expr, scope, timecontext, **kwargs):
op = expr.op()
@F.udf('string')
def repeat(s, times):
return s * times
src_column = t.translate(op.arg, scope, timecontext)
times_column = t.translate(op.times, scope, timecontext)
return repeat(src_column, times_column)
@compiles(ops.StringFind)
def compile_string_find(t, expr, scope, timecontext, **kwargs):
op = expr.op()
@F.udf('long')
def str_find(s, substr, start, end):
return s.find(substr, start, end)
src_column = t.translate(op.arg, scope, timecontext)
substr_column = t.translate(op.substr, scope, timecontext)
start_column = (
t.translate(op.start, scope, timecontext) if op.start else F.lit(None)
)
end_column = (
t.translate(op.end, scope, timecontext) if op.end else F.lit(None)
)
return str_find(src_column, substr_column, start_column, end_column)
@compiles(ops.Translate)
def compile_translate(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
from_str = op.from_str.op().value
to_str = op.to_str.op().value
return F.translate(src_column, from_str, to_str)
@compiles(ops.LPad)
def compile_lpad(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
length = op.length.op().value
pad = op.pad.op().value
return F.lpad(src_column, length, pad)
@compiles(ops.RPad)
def compile_rpad(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
length = op.length.op().value
pad = op.pad.op().value
return F.rpad(src_column, length, pad)
@compiles(ops.StringJoin)
def compile_string_join(t, expr, scope, timecontext, **kwargs):
op = expr.op()
@F.udf('string')
def join(sep, arr):
return sep.join(arr)
sep_column = t.translate(op.sep, scope, timecontext)
arg = t.translate(op.arg, scope, timecontext)
return join(sep_column, F.array(arg))
@compiles(ops.RegexSearch)
def compile_regex_search(t, expr, scope, timecontext, **kwargs):
import re
op = expr.op()
@F.udf('boolean')
def regex_search(s, pattern):
return True if re.search(pattern, s) else False
src_column = t.translate(op.arg, scope, timecontext)
pattern = t.translate(op.pattern, scope, timecontext)
return regex_search(src_column, pattern)
@compiles(ops.RegexExtract)
def compile_regex_extract(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
pattern = op.pattern.op().value
idx = op.index.op().value
return F.regexp_extract(src_column, pattern, idx)
@compiles(ops.RegexReplace)
def compile_regex_replace(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
pattern = op.pattern.op().value
replacement = op.replacement.op().value
return F.regexp_replace(src_column, pattern, replacement)
@compiles(ops.StringReplace)
def compile_string_replace(t, expr, scope, timecontext, **kwargs):
    return compile_regex_replace(t, expr, scope, timecontext, **kwargs)
@compiles(ops.StringSplit)
def compile_string_split(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
delimiter = op.delimiter.op().value
return F.split(src_column, delimiter)
@compiles(ops.StringConcat)
def compile_string_concat(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_columns = t.translate(op.arg, scope, timecontext)
return F.concat(*src_columns)
@compiles(ops.StringAscii)
def compile_string_ascii(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.ascii(src_column)
@compiles(ops.StringSQLLike)
def compile_string_like(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
pattern = op.pattern.op().value
return src_column.like(pattern)
@compiles(ops.ValueList)
def compile_value_list(t, expr, scope, timecontext, **kwargs):
op = expr.op()
return [t.translate(col, scope, timecontext) for col in op.values]
@compiles(ops.InnerJoin)
def compile_inner_join(t, expr, scope, timecontext, **kwargs):
return compile_join(t, expr, scope, timecontext, how='inner')
@compiles(ops.LeftJoin)
def compile_left_join(t, expr, scope, timecontext, **kwargs):
return compile_join(t, expr, scope, timecontext, how='left')
@compiles(ops.RightJoin)
def compile_right_join(t, expr, scope, timecontext, **kwargs):
return compile_join(t, expr, scope, timecontext, how='right')
@compiles(ops.OuterJoin)
def compile_outer_join(t, expr, scope, timecontext, **kwargs):
return compile_join(t, expr, scope, timecontext, how='outer')
def compile_join(t, expr, scope, timecontext, *, how):
op = expr.op()
left_df = t.translate(op.left, scope, timecontext)
right_df = t.translate(op.right, scope, timecontext)
pred_columns = []
for pred in op.predicates:
pred_op = pred.op()
if not isinstance(pred_op, ops.Equals):
raise NotImplementedError(
"Only equality predicate is supported, but got {}".format(
type(pred_op)
)
)
pred_columns.append(pred_op.left.get_name())
return left_df.join(right_df, pred_columns, how)
def _canonicalize_interval(t, interval, scope, timecontext, **kwargs):
""" Convert interval to integer timestamp of second
When pyspark cast timestamp to integer type, it uses the number of seconds
since epoch. Therefore, we need cast ibis interval correspondingly.
"""
if isinstance(interval, ir.IntervalScalar):
value = t.translate(interval, scope, timecontext, **kwargs)
# value is in nanoseconds and spark uses seconds since epoch
return int(value / 1e9)
elif isinstance(interval, int):
return interval
raise com.UnsupportedOperationError(
f'type {type(interval)} is not supported in preceding /following '
'in window.'
)
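# Rough worked example (assuming an ibis interval literal): a 1-hour interval
# compiles to 3_600_000_000_000 nanoseconds (see compile_literal above), so
# _canonicalize_interval returns int(3.6e12 / 1e9) == 3600, the seconds-based
# bound that Spark window frames expect for timestamp ordering keys.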
@compiles(ops.WindowOp)
def compile_window_op(t, expr, scope, timecontext, **kwargs):
op = expr.op()
window = op.window
operand = op.expr
group_by = window._group_by
grouping_keys = [
key_op.name
if isinstance(key_op, ops.TableColumn)
else t.translate(key, scope, timecontext)
for key, key_op in zip(
group_by, map(operator.methodcaller('op'), group_by)
)
]
order_by = window._order_by
# Timestamp needs to be cast to long for window bounds in spark
ordering_keys = [
F.col(sort_expr.get_name()).cast('long')
if isinstance(sort_expr.op().expr, types.TimestampColumn)
else sort_expr.get_name()
for sort_expr in order_by
]
context = AggregationContext.WINDOW
pyspark_window = Window.partitionBy(grouping_keys).orderBy(ordering_keys)
# If the operand is a shift op (e.g. lead, lag), Spark will set the window
# bounds. Only set window bounds here if not a shift operation.
if not isinstance(operand.op(), ops.ShiftBase):
if window.preceding is None:
start = Window.unboundedPreceding
else:
start = -_canonicalize_interval(
t, window.preceding, scope, timecontext, **kwargs
)
if window.following is None:
end = Window.unboundedFollowing
else:
end = _canonicalize_interval(
t, window.following, scope, timecontext, **kwargs
)
if isinstance(window.preceding, ir.IntervalScalar) or isinstance(
window.following, ir.IntervalScalar
):
pyspark_window = pyspark_window.rangeBetween(start, end)
else:
pyspark_window = pyspark_window.rowsBetween(start, end)
res_op = operand.op()
if isinstance(res_op, (ops.NotAll, ops.NotAny)):
# For NotAll and NotAny, negation must be applied after .over(window)
# Here we rewrite node to be its negation, and negate it back after
# translation and window operation
operand = res_op.negate().to_expr()
result = t.translate(operand, scope, timecontext, context=context).over(
pyspark_window
)
if isinstance(res_op, (ops.NotAll, ops.NotAny)):
return ~result
elif isinstance(res_op, (ops.MinRank, ops.DenseRank, ops.RowNumber)):
# result must be cast to long type for Rank / RowNumber
return result.astype('long') - 1
else:
return result
def _handle_shift_operation(t, expr, scope, timecontext, *, fn, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
default = op.default.op().value if op.default is not None else op.default
offset = op.offset.op().value if op.offset is not None else op.offset
if offset:
return fn(src_column, count=offset, default=default)
else:
return fn(src_column, default=default)
@compiles(ops.Lag)
def compile_lag(t, expr, scope, timecontext, **kwargs):
return _handle_shift_operation(
t, expr, scope, timecontext, fn=F.lag, **kwargs
)
@compiles(ops.Lead)
def compile_lead(t, expr, scope, timecontext, **kwargs):
return _handle_shift_operation(
t, expr, scope, timecontext, fn=F.lead, **kwargs
)
@compiles(ops.MinRank)
def compile_rank(t, expr, scope, timecontext, **kwargs):
return F.rank()
@compiles(ops.DenseRank)
def compile_dense_rank(t, expr, scope, timecontext, **kwargs):
return F.dense_rank()
@compiles(ops.PercentRank)
def compile_percent_rank(t, expr, scope, timecontext, **kwargs):
raise com.UnsupportedOperationError(
'Pyspark percent_rank() function indexes from 0 '
'instead of 1, and does not match expected '
'output of ibis expressions.'
)
@compiles(ops.NTile)
def compile_ntile(t, expr, scope, timecontext, **kwargs):
op = expr.op()
buckets = op.buckets.op().value
return F.ntile(buckets)
@compiles(ops.FirstValue)
def compile_first_value(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.first(src_column)
@compiles(ops.LastValue)
def compile_last_value(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.last(src_column)
@compiles(ops.RowNumber)
def compile_row_number(t, expr, scope, timecontext, **kwargs):
return F.row_number()
# -------------------------- Temporal Operations ----------------------------
# Ibis value to PySpark value
_time_unit_mapping = {
'Y': 'year',
'Q': 'quarter',
'M': 'month',
'W': 'week',
'D': 'day',
'h': 'hour',
'm': 'minute',
's': 'second',
}
@compiles(ops.Date)
def compile_date(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.to_date(src_column).cast('timestamp')
def _extract_component_from_datetime(
t, expr, scope, timecontext, *, extract_fn, **kwargs
):
op = expr.op()
date_col = t.translate(op.arg, scope, timecontext)
return extract_fn(date_col)
@compiles(ops.ExtractYear)
def compile_extract_year(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.year, **kwargs
)
@compiles(ops.ExtractMonth)
def compile_extract_month(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.month, **kwargs
)
@compiles(ops.ExtractDay)
def compile_extract_day(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.dayofmonth, **kwargs
)
@compiles(ops.ExtractDayOfYear)
def compile_extract_day_of_year(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.dayofyear, **kwargs
)
@compiles(ops.ExtractQuarter)
def compile_extract_quarter(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.quarter, **kwargs
)
@compiles(ops.ExtractEpochSeconds)
def compile_extract_epoch_seconds(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.unix_timestamp, **kwargs
)
@compiles(ops.ExtractWeekOfYear)
def compile_extract_week_of_year(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.weekofyear, **kwargs
)
@compiles(ops.ExtractHour)
def compile_extract_hour(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.hour, **kwargs
)
@compiles(ops.ExtractMinute)
def compile_extract_minute(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.minute, **kwargs
)
@compiles(ops.ExtractSecond)
def compile_extract_second(t, expr, scope, timecontext, **kwargs):
return _extract_component_from_datetime(
t, expr, scope, timecontext, extract_fn=F.second, **kwargs
)
@compiles(ops.ExtractMillisecond)
def compile_extract_millisecond(t, expr, scope, timecontext, **kwargs):
raise com.UnsupportedOperationError(
'PySpark backend does not support extracting milliseconds.'
)
@compiles(ops.DateTruncate)
def compile_date_truncate(t, expr, scope, timecontext, **kwargs):
op = expr.op()
try:
unit = _time_unit_mapping[op.unit]
except KeyError:
raise com.UnsupportedOperationError(
'{!r} unit is not supported in timestamp truncate'.format(op.unit)
)
src_column = t.translate(op.arg, scope, timecontext)
return F.date_trunc(unit, src_column)
@compiles(ops.TimestampTruncate)
def compile_timestamp_truncate(t, expr, scope, timecontext, **kwargs):
return compile_date_truncate(t, expr, scope, timecontext, **kwargs)
@compiles(ops.Strftime)
def compile_strftime(t, expr, scope, timecontext, **kwargs):
op = expr.op()
format_str = op.format_str.op().value
@pandas_udf('string', PandasUDFType.SCALAR)
def strftime(timestamps):
return timestamps.dt.strftime(format_str)
src_column = t.translate(op.arg, scope, timecontext)
return strftime(src_column)
@compiles(ops.TimestampFromUNIX)
def compile_timestamp_from_unix(t, expr, scope, timecontext, **kwargs):
op = expr.op()
unixtime = t.translate(op.arg, scope, timecontext)
if not op.unit:
return F.to_timestamp(F.from_unixtime(unixtime))
elif op.unit == 's':
fmt = 'yyyy-MM-dd HH:mm:ss'
return F.to_timestamp(F.from_unixtime(unixtime, fmt), fmt)
else:
raise com.UnsupportedArgumentError(
'PySpark backend does not support timestamp from unix time with '
'unit {}. Supported unit is s.'.format(op.unit)
)
@compiles(ops.TimestampNow)
def compile_timestamp_now(t, expr, scope, timecontext, **kwargs):
return F.current_timestamp()
@compiles(ops.StringToTimestamp)
def compile_string_to_timestamp(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
fmt = op.format_str.op().value
if op.timezone is not None and op.timezone.op().value != "UTC":
raise com.UnsupportedArgumentError(
'PySpark backend only supports timezone UTC for converting string '
'to timestamp.'
)
return F.to_timestamp(src_column, fmt)
@compiles(ops.DayOfWeekIndex)
def compile_day_of_week_index(t, expr, scope, timecontext, **kwargs):
op = expr.op()
@pandas_udf('short', PandasUDFType.SCALAR)
def day_of_week(s):
return s.dt.dayofweek
src_column = t.translate(op.arg, scope, timecontext)
return day_of_week(src_column.cast('timestamp'))
@compiles(ops.DayOfWeekName)
def compiles_day_of_week_name(t, expr, scope, timecontext, **kwargs):
op = expr.op()
@pandas_udf('string', PandasUDFType.SCALAR)
def day_name(s):
return s.dt.day_name()
src_column = t.translate(op.arg, scope, timecontext)
return day_name(src_column.cast('timestamp'))
def _get_interval_col(
t, interval_ibis_expr, scope, timecontext, allowed_units=None
):
# if interval expression is a binary op, translate expression into
# an interval column and return
if isinstance(interval_ibis_expr.op(), ops.IntervalBinaryOp):
return t.translate(interval_ibis_expr, scope, timecontext)
# otherwise, translate expression into a literal op and construct
# interval column from literal value and dtype
if isinstance(interval_ibis_expr.op(), ops.Literal):
op = interval_ibis_expr.op()
else:
op = t.translate(interval_ibis_expr, scope, timecontext).op()
dtype = op.dtype
if not isinstance(dtype, dtypes.Interval):
raise com.UnsupportedArgumentError(
'{} expression cannot be converted to interval column. '
'Must be Interval dtype.'.format(dtype)
)
if allowed_units and dtype.unit not in allowed_units:
raise com.UnsupportedArgumentError(
'Interval unit "{}" is not allowed. Allowed units are: '
'{}'.format(dtype.unit, allowed_units)
)
return F.expr(
'INTERVAL {} {}'.format(op.value, _time_unit_mapping[dtype.unit])
)
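# For illustration (hypothetical values): an interval literal with value=2 and
# unit='D' is rendered as F.expr('INTERVAL 2 day') via _time_unit_mapping above.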
def _compile_datetime_binop(
t, expr, scope, timecontext, *, fn, allowed_units, **kwargs
):
op = expr.op()
left = t.translate(op.left, scope, timecontext)
right = _get_interval_col(t, op.right, scope, timecontext, allowed_units)
return fn(left, right)
@compiles(ops.DateAdd)
def compile_date_add(t, expr, scope, timecontext, **kwargs):
allowed_units = ['Y', 'W', 'M', 'D']
return _compile_datetime_binop(
t,
expr,
scope,
timecontext,
fn=(lambda l, r: (l + r).cast('timestamp')),
allowed_units=allowed_units,
**kwargs,
)
@compiles(ops.DateSub)
def compile_date_sub(t, expr, scope, timecontext, **kwargs):
allowed_units = ['Y', 'W', 'M', 'D']
return _compile_datetime_binop(
t,
expr,
scope,
timecontext,
fn=(lambda l, r: (l - r).cast('timestamp')),
allowed_units=allowed_units,
**kwargs,
)
@compiles(ops.DateDiff)
def compile_date_diff(t, expr, scope, timecontext, **kwargs):
raise com.UnsupportedOperationError(
'PySpark backend does not support DateDiff as there is no '
'timedelta type.'
)
@compiles(ops.TimestampAdd)
def compile_timestamp_add(t, expr, scope, timecontext, **kwargs):
allowed_units = ['Y', 'W', 'M', 'D', 'h', 'm', 's']
return _compile_datetime_binop(
t,
expr,
scope,
timecontext,
fn=(lambda l, r: (l + r).cast('timestamp')),
allowed_units=allowed_units,
**kwargs,
)
@compiles(ops.TimestampSub)
def compile_timestamp_sub(t, expr, scope, timecontext, **kwargs):
allowed_units = ['Y', 'W', 'M', 'D', 'h', 'm', 's']
return _compile_datetime_binop(
t,
expr,
scope,
timecontext,
fn=(lambda l, r: (l - r).cast('timestamp')),
allowed_units=allowed_units,
**kwargs,
)
@compiles(ops.TimestampDiff)
def compile_timestamp_diff(t, expr, scope, timecontext, **kwargs):
raise com.UnsupportedOperationError(
'PySpark backend does not support TimestampDiff as there is no '
'timedelta type.'
)
def _compile_interval_binop(t, expr, scope, timecontext, *, fn, **kwargs):
op = expr.op()
left = _get_interval_col(t, op.left, scope, timecontext)
right = _get_interval_col(t, op.right, scope, timecontext)
return fn(left, right)
@compiles(ops.IntervalAdd)
def compile_interval_add(t, expr, scope, timecontext, **kwargs):
return _compile_interval_binop(
t, expr, scope, timecontext, fn=(lambda l, r: l + r), **kwargs
)
@compiles(ops.IntervalSubtract)
def compile_interval_subtract(t, expr, scope, timecontext, **kwargs):
return _compile_interval_binop(
t, expr, scope, timecontext, fn=(lambda l, r: l - r), **kwargs
)
@compiles(ops.IntervalFromInteger)
def compile_interval_from_integer(t, expr, scope, timecontext, **kwargs):
raise com.UnsupportedOperationError(
'Interval from integer column is unsupported for the PySpark backend.'
)
# -------------------------- Array Operations ----------------------------
@compiles(ops.ArrayColumn)
def compile_array_column(t, expr, scope, timecontext, **kwargs):
op = expr.op()
cols = t.translate(op.cols, scope, timecontext)
return F.array(cols)
@compiles(ops.ArrayLength)
def compile_array_length(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.size(src_column)
@compiles(ops.ArraySlice)
def compile_array_slice(t, expr, scope, timecontext, **kwargs):
op = expr.op()
start = op.start.op().value if op.start is not None else op.start
stop = op.stop.op().value if op.stop is not None else op.stop
spark_type = ibis_array_dtype_to_spark_dtype(op.arg.type())
@F.udf(spark_type)
def array_slice(array):
return array[start:stop]
src_column = t.translate(op.arg, scope, timecontext)
return array_slice(src_column)
@compiles(ops.ArrayIndex)
def compile_array_index(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
index = op.index.op().value + 1
return F.element_at(src_column, index)
@compiles(ops.ArrayConcat)
def compile_array_concat(t, expr, scope, timecontext, **kwargs):
op = expr.op()
left = t.translate(op.left, scope, timecontext)
right = t.translate(op.right, scope, timecontext)
return F.concat(left, right)
@compiles(ops.ArrayRepeat)
def compile_array_repeat(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
times = op.times.op().value
return F.flatten(F.array_repeat(src_column, times))
@compiles(ops.ArrayCollect)
def compile_array_collect(t, expr, scope, timecontext, **kwargs):
op = expr.op()
src_column = t.translate(op.arg, scope, timecontext)
return F.collect_list(src_column)
# --------------------------- Null Operations -----------------------------
@compiles(ops.NullLiteral)
def compile_null_literal(t, expr, scope, timecontext, **kwargs):
return F.lit(None)
@compiles(ops.IfNull)
def compile_if_null(t, expr, scope, timecontext, **kwargs):
op = expr.op()
col = t.translate(op.arg, scope, timecontext)
ifnull_col = t.translate(op.ifnull_expr, scope, timecontext)
return F.when(col.isNull(), ifnull_col).otherwise(col)
@compiles(ops.NullIf)
def compile_null_if(t, expr, scope, timecontext, **kwargs):
op = expr.op()
col = t.translate(op.arg, scope, timecontext)
nullif_col = t.translate(op.null_if_expr, scope, timecontext)
return F.when(col == nullif_col, F.lit(None)).otherwise(col)
@compiles(ops.IsNull)
def compile_is_null(t, expr, scope, timecontext, **kwargs):
op = expr.op()
col = t.translate(op.arg, scope, timecontext)
return F.isnull(col) | F.isnan(col)
@compiles(ops.NotNull)
def compile_not_null(t, expr, scope, timecontext, **kwargs):
op = expr.op()
col = t.translate(op.arg, scope, timecontext)
return ~F.isnull(col) & ~F.isnan(col)
# ------------------------- User defined function ------------------------
def _wrap_struct_func(func, output_cols):
@functools.wraps(func)
def wrapped(*args, **kwargs):
result = func(*args, **kwargs)
return coerce_to_dataframe(result, output_cols)
return wrapped
@compiles(ops.ElementWiseVectorizedUDF)
def compile_elementwise_udf(t, expr, scope, timecontext, **kwargs):
op = expr.op()
spark_output_type = spark_dtype(op._output_type)
if isinstance(expr, (types.StructColumn, types.DestructColumn)):
func = _wrap_struct_func(op.func, spark_output_type.names)
else:
func = op.func
spark_udf = pandas_udf(func, spark_output_type, PandasUDFType.SCALAR)
func_args = (t.translate(arg, scope, timecontext) for arg in op.func_args)
return spark_udf(*func_args)
@compiles(ops.ReductionVectorizedUDF)
def compile_reduction_udf(t, expr, scope, timecontext, context=None, **kwargs):
op = expr.op()
spark_output_type = spark_dtype(op._output_type)
spark_udf = pandas_udf(
op.func, spark_output_type, PandasUDFType.GROUPED_AGG
)
func_args = (t.translate(arg, scope, timecontext) for arg in op.func_args)
col = spark_udf(*func_args)
if context:
return col
else:
src_table = t.translate(op.func_args[0].op().table, scope, timecontext)
return src_table.agg(col)
@compiles(ops.SearchedCase)
def compile_searched_case(t, expr, scope, timecontext, **kwargs):
op = expr.op()
existing_when = None
for case, result in zip(op.cases, op.results):
if existing_when is not None:
            # Spark allows chained when statements
when = existing_when.when
else:
when = F.when
existing_when = when(
t.translate(case, scope, timecontext, **kwargs),
t.translate(result, scope, timecontext, **kwargs),
)
return existing_when.otherwise(
t.translate(op.default, scope, timecontext, **kwargs)
)
| 29.664234 | 79 | 0.668118 |
4504124ca6347f881ae36b0f3c8c43ede7587be0 | 123 | py | Python |
scrapyspider/run.py | yan-jin/douban_top250_scrapyspider | 176c974a32cc0975b2ce6bede9cf201061761628 | ["MIT"] | null | null | null |
scrapyspider/run.py | yan-jin/douban_top250_scrapyspider | 176c974a32cc0975b2ce6bede9cf201061761628 | ["MIT"] | null | null | null |
scrapyspider/run.py | yan-jin/douban_top250_scrapyspider | 176c974a32cc0975b2ce6bede9cf201061761628 | ["MIT"] | null | null | null |
from scrapy import cmdline
name = 'douban_movie_top250'
cmd = 'scrapy crawl {}'.format(name)
cmdline.execute(cmd.split())
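# Running this module is equivalent to invoking `scrapy crawl douban_movie_top250`
# from the Scrapy project directory.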
| 20.5 | 36 | 0.747967 |
ed8bcb6e7e5cfbb694dcdfa4955337592d0d31c8 | 4,398 | py | Python |
homeassistant/components/locative/__init__.py | jeanfpoulin/home-assistant | 04dbe5bc841e1a429873efbd850c35b823ef26ce | ["Apache-2.0"] | 3 | 2020-05-18T10:18:16.000Z | 2020-12-08T11:27:55.000Z |
homeassistant/components/locative/__init__.py | jeanfpoulin/home-assistant | 04dbe5bc841e1a429873efbd850c35b823ef26ce | ["Apache-2.0"] | 3 | 2021-02-08T20:54:46.000Z | 2021-09-08T02:30:04.000Z |
homeassistant/components/locative/__init__.py | jeanfpoulin/home-assistant | 04dbe5bc841e1a429873efbd850c35b823ef26ce | ["Apache-2.0"] | 6 | 2020-04-10T06:21:11.000Z | 2021-07-01T08:53:38.000Z |
"""Support for Locative."""
import logging
from typing import Dict
import voluptuous as vol
from aiohttp import web
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.const import (
HTTP_UNPROCESSABLE_ENTITY,
ATTR_LATITUDE,
ATTR_LONGITUDE,
STATE_NOT_HOME,
CONF_WEBHOOK_ID,
ATTR_ID,
HTTP_OK,
)
from homeassistant.helpers import config_entry_flow
from homeassistant.helpers.dispatcher import async_dispatcher_send
_LOGGER = logging.getLogger(__name__)
DOMAIN = "locative"
TRACKER_UPDATE = f"{DOMAIN}_tracker_update"
ATTR_DEVICE_ID = "device"
ATTR_TRIGGER = "trigger"
def _id(value: str) -> str:
"""Coerce id by removing '-'."""
return value.replace("-", "")
def _validate_test_mode(obj: Dict) -> Dict:
"""Validate that id is provided outside of test mode."""
if ATTR_ID not in obj and obj[ATTR_TRIGGER] != "test":
raise vol.Invalid("Location id not specified")
return obj
WEBHOOK_SCHEMA = vol.All(
vol.Schema(
{
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
vol.Required(ATTR_DEVICE_ID): cv.string,
vol.Required(ATTR_TRIGGER): cv.string,
vol.Optional(ATTR_ID): vol.All(cv.string, _id),
},
extra=vol.ALLOW_EXTRA,
),
_validate_test_mode,
)
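# Illustrative (made-up) payload accepted by WEBHOOK_SCHEMA:
# {"latitude": "52.52", "longitude": "13.40", "device": "my-phone",
#  "trigger": "enter", "id": "home"}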
async def async_setup(hass, hass_config):
"""Set up the Locative component."""
hass.data[DOMAIN] = {"devices": set(), "unsub_device_tracker": {}}
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook from Locative."""
try:
data = WEBHOOK_SCHEMA(dict(await request.post()))
except vol.MultipleInvalid as error:
return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY)
device = data[ATTR_DEVICE_ID]
location_name = data.get(ATTR_ID, data[ATTR_TRIGGER]).lower()
direction = data[ATTR_TRIGGER]
gps_location = (data[ATTR_LATITUDE], data[ATTR_LONGITUDE])
if direction == "enter":
async_dispatcher_send(hass, TRACKER_UPDATE, device, gps_location, location_name)
return web.Response(text=f"Setting location to {location_name}", status=HTTP_OK)
if direction == "exit":
current_state = hass.states.get(f"{DEVICE_TRACKER}.{device}")
if current_state is None or current_state.state == location_name:
location_name = STATE_NOT_HOME
async_dispatcher_send(
hass, TRACKER_UPDATE, device, gps_location, location_name
)
return web.Response(text="Setting location to not home", status=HTTP_OK)
# Ignore the message if it is telling us to exit a zone that we
# aren't currently in. This occurs when a zone is entered
# before the previous zone was exited. The enter message will
# be sent first, then the exit message will be sent second.
return web.Response(
text="Ignoring exit from {} (already in {})".format(
location_name, current_state
),
status=HTTP_OK,
)
if direction == "test":
# In the app, a test message can be sent. Just return something to
# the user to let them know that it works.
return web.Response(text="Received test message.", status=HTTP_OK)
_LOGGER.error("Received unidentified message from Locative: %s", direction)
return web.Response(
text=f"Received unidentified message: {direction}",
status=HTTP_UNPROCESSABLE_ENTITY,
)
async def async_setup_entry(hass, entry):
"""Configure based on config entry."""
hass.components.webhook.async_register(
DOMAIN, "Locative", entry.data[CONF_WEBHOOK_ID], handle_webhook
)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER)
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)()
return await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER)
# pylint: disable=invalid-name
async_remove_entry = config_entry_flow.webhook_async_remove_entry
| 32.577778 | 88 | 0.692133 |
44e5005e167dde44b963f3a92f3bd75c9021b2e5 | 386 | py | Python |
newsSpider/newsSpider/items.py | xunyangjian/spider | 3d6cfa3d8e3a608b414bed920e4b408043d119a4 | ["MIT"] | null | null | null |
newsSpider/newsSpider/items.py | xunyangjian/spider | 3d6cfa3d8e3a608b414bed920e4b408043d119a4 | ["MIT"] | null | null | null |
newsSpider/newsSpider/items.py | xunyangjian/spider | 3d6cfa3d8e3a608b414bed920e4b408043d119a4 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class NewsspiderItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
    title = scrapy.Field()  # article title
    link = scrapy.Field()  # article link
    desc = scrapy.Field()  # article description
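# Minimal usage sketch (hypothetical values): a spider callback would typically
# yield NewsspiderItem(title='...', link='https://example.com/article', desc='...')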
| 22.705882 | 51 | 0.65544 |
f8ea9b06f93217608c0ac1c709fab70e125ec784 | 4,660 | py | Python |
neuspell/corrector_lstmlstm.py | michael-conrad/neuspell | f1d1a8b4efa7c6aa6e0564ea17db152905f4c7dc | ["MIT"] | 1 | 2021-05-21T09:39:50.000Z | 2021-05-21T09:39:50.000Z |
neuspell/corrector_lstmlstm.py | michael-conrad/neuspell | f1d1a8b4efa7c6aa6e0564ea17db152905f4c7dc | ["MIT"] | null | null | null |
neuspell/corrector_lstmlstm.py | michael-conrad/neuspell | f1d1a8b4efa7c6aa6e0564ea17db152905f4c7dc | ["MIT"] | null | null | null |
import os
from typing import List
import torch
from .commons import spacy_tokenizer, DEFAULT_DATA_PATH, Corrector
from .seq_modeling.downloads import download_pretrained_model
from .seq_modeling.helpers import load_data, load_vocab_dict, get_model_nparams
from .seq_modeling.lstmlstm import load_model, load_pretrained, model_predictions, model_inference
""" corrector module """
class CorrectorLstmLstm(Corrector):
def __init__(self, tokenize=True, pretrained=False, device="cpu"):
super(CorrectorLstmLstm, self).__init__()
self.tokenize = tokenize
self.pretrained = pretrained
self.device = device
self.ckpt_path = f"{DEFAULT_DATA_PATH}/checkpoints/lstm-lstm-probwordnoise"
self.vocab_path, self.weights_path = "", ""
self.model, self.vocab = None, None
if self.pretrained:
self.from_pretrained(self.ckpt_path)
def __model_status(self):
        assert not (self.model is None or self.vocab is None), "model & vocab must be loaded first"
return
def from_pretrained(self, ckpt_path, vocab="", weights=""):
self.ckpt_path = ckpt_path
self.vocab_path = vocab if vocab else os.path.join(ckpt_path, "vocab.pkl")
if not os.path.isfile(self.vocab_path): # leads to "FileNotFoundError"
download_pretrained_model(self.ckpt_path)
print(f"loading vocab from path:{self.vocab_path}")
self.vocab = load_vocab_dict(self.vocab_path)
print(f"initializing model")
self.model = load_model(self.vocab)
self.weights_path = weights if weights else self.ckpt_path
print(f"loading pretrained weights from path:{self.weights_path}")
self.model = load_pretrained(self.model, self.weights_path, device=self.device)
return
def set_device(self, device='cpu'):
prev_device = self.device
device = "cuda" if (device == "gpu" and torch.cuda.is_available()) else "cpu"
if not (prev_device == device):
if self.model is not None:
                # reload the weights instead of just calling .to(new_device): moving the
                # model that way caused issues when new_device is not the same as the old
                # device, https://tinyurl.com/y57pcjvd
self.from_pretrained(self.ckpt_path, vocab=self.vocab_path, weights=self.weights_path)
self.device = device
print(f"model set to work on {device}")
return
def correct(self, x):
return self.correct_string(x)
def correct_string(self, mystring: str, return_all=False) -> str:
x = self.correct_strings([mystring], return_all=return_all)
if return_all:
return x[0][0], x[1][0]
else:
return x[0]
def correct_strings(self, mystrings: List[str], return_all=False) -> List[str]:
self.__model_status()
if self.tokenize:
mystrings = [spacy_tokenizer(my_str) for my_str in mystrings]
data = [(line, line) for line in mystrings]
batch_size = 4 if self.device == "cpu" else 16
return_strings = model_predictions(self.model, data, self.vocab, device=self.device, batch_size=batch_size)
if return_all:
return mystrings, return_strings
else:
return return_strings
    def correct_from_file(self, src, dest="./clean_version.txt"):
        """
        src = f"{DEFAULT_DATA_PATH}/traintest/corrupt.txt"
        """
        self.__model_status()
        with open(src, 'r') as opfile_in:
            x = [line.strip() for line in opfile_in]
        y = self.correct_strings(x)
        print(f"saving results at: {dest}")
        with open(dest, 'w') as opfile:
            for line in y:
                opfile.write(line + "\n")
        return
def evaluate(self, clean_file, corrupt_file):
"""
clean_file = f"{DEFAULT_DATA_PATH}/traintest/clean.txt"
corrupt_file = f"{DEFAULT_DATA_PATH}/traintest/corrupt.txt"
"""
self.__model_status()
batch_size = 4 if self.device == "cpu" else 16
for x, y, z in zip([""], [clean_file], [corrupt_file]):
print(x, y, z)
test_data = load_data(x, y, z)
_ = model_inference(self.model,
test_data,
topk=1,
device=self.device,
batch_size=batch_size,
vocab_=self.vocab)
return
def model_size(self):
self.__model_status()
return get_model_nparams(self.model)
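# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): a minimal way to drive
# CorrectorLstmLstm end to end. It assumes the pretrained lstm-lstm checkpoint
# can be downloaded to, or already sits under, DEFAULT_DATA_PATH; the sample
# sentence is made up for illustration.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    corrector = CorrectorLstmLstm(tokenize=True, pretrained=True, device="cpu")
    corrector.set_device("gpu")  # silently falls back to CPU when CUDA is unavailable
    print(corrector.correct("I luk forwrd to receving your reply"))
    print(f"model parameters: {corrector.model_size()}")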
| 40.172414 | 116 | 0.606652 |
6bd421e45a1e9a98dbdede27df576170327de262 | 79,010 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_10_01/operations/_virtual_network_gateways_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | ["MIT"] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewaysOperations(object):
"""VirtualNetworkGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
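    # ----------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated code): as the class
    # docstring notes, this operation group is not instantiated directly; it
    # is reached through the management client. The credential and
    # subscription id below are illustrative assumptions.
    #
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.network import NetworkManagementClient
    #
    #   client = NetworkManagementClient(DefaultAzureCredential(), subscription_id)
    #   ops = client.virtual_network_gateways  # VirtualNetworkGatewaysOperations
    # ----------------------------------------------------------------------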
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> "models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.VirtualNetworkGateway"]
"""Creates or updates a virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to create or update virtual network gateway operation.
:type parameters: ~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
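    # ----------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated code): driving the
    # LROPoller returned by begin_create_or_update, reusing the hypothetical
    # client from the sketch above. Resource names and the gateway model are
    # illustrative assumptions.
    #
    #   poller = client.virtual_network_gateways.begin_create_or_update(
    #       "my-rg", "my-vnet-gateway", gateway_parameters)
    #   gateway = poller.result()  # blocks until the long-running operation finishes
    # ----------------------------------------------------------------------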
def get(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.VirtualNetworkGateway"
"""Gets the specified virtual network gateway by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
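    # ----------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated code): begin_delete
    # returns an LROPoller[None], so callers typically just wait for
    # completion. Names are illustrative assumptions.
    #
    #   client.virtual_network_gateways.begin_delete("my-rg", "my-vnet-gateway").wait()
    # ----------------------------------------------------------------------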
def _update_tags_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.VirtualNetworkGateway"]
"""Updates a virtual network gateway tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to update virtual network gateway tags.
:type parameters: ~azure.mgmt.network.v2017_10_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.VirtualNetworkGatewayListResult"]
"""Gets all virtual network gateways by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways'} # type: ignore
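    # ----------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated code): list() returns an
    # ItemPaged iterator, so paging across next_link is handled transparently
    # while iterating. The resource group name is an illustrative assumption.
    #
    #   for gateway in client.virtual_network_gateways.list("my-rg"):
    #       print(gateway.name, gateway.location)
    # ----------------------------------------------------------------------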
def list_connections(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.VirtualNetworkGatewayListConnectionsResult"]
"""Gets all the connections in a virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayListConnectionsResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGatewayListConnectionsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGatewayListConnectionsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_connections.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListConnectionsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/connections'} # type: ignore
def _reset_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["models.VirtualNetworkGateway"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if gateway_vip is not None:
query_parameters['gatewayVip'] = self._serialize.query("gateway_vip", gateway_vip, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
def begin_reset(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.VirtualNetworkGateway"]
"""Resets the primary of the virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
        :param gateway_vip: Virtual network gateway VIP address supplied to the begin-reset
         operation of an active-active feature enabled gateway.
:type gateway_vip: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
gateway_vip=gateway_vip,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
def _generatevpnclientpackage_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._generatevpnclientpackage_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevpnclientpackage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
def begin_generatevpnclientpackage(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN client package for P2S client of the virtual network gateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2017_10_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generatevpnclientpackage_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
def _generate_vpn_profile_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._generate_vpn_profile_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generate_vpn_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
def begin_generate_vpn_profile(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN profile for P2S client of the virtual network gateway in the specified resource
group. Used for IKEV2 and radius based authentication.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2017_10_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generate_vpn_profile_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generate_vpn_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'} # type: ignore
def _get_vpn_profile_package_url_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional[str]
cls = kwargs.pop('cls', None) # type: ClsType[Optional[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self._get_vpn_profile_package_url_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vpn_profile_package_url_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
def begin_get_vpn_profile_package_url(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Gets pre-generated VPN profile for P2S client of the virtual network gateway in the specified
resource group. The profile needs to be generated first using generateVpnProfile.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vpn_profile_package_url_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vpn_profile_package_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'} # type: ignore
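    # ----------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated code): the two-step flow
    # described in the docstring above: generate the VPN profile first, then
    # ask for the package URL. vpn_client_parameters and the resource names
    # are illustrative assumptions.
    #
    #   client.virtual_network_gateways.begin_generate_vpn_profile(
    #       "my-rg", "my-vnet-gateway", vpn_client_parameters).wait()
    #   url_poller = client.virtual_network_gateways.begin_get_vpn_profile_package_url(
    #       "my-rg", "my-vnet-gateway")
    #   package_url = url_poller.result()  # URL of the generated profile package
    # ----------------------------------------------------------------------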
def _get_bgp_peer_status_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["models.BgpPeerStatusListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.BgpPeerStatusListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self._get_bgp_peer_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if peer is not None:
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_bgp_peer_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
def begin_get_bgp_peer_status(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.BgpPeerStatusListResult"]
"""The GetBgpPeerStatus operation retrieves the status of all BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer to retrieve the status of.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either BgpPeerStatusListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.BgpPeerStatusListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.BgpPeerStatusListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_bgp_peer_status_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_bgp_peer_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
def supported_vpn_devices(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> str
"""Gets a xml format representation for supported vpn devices.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self.supported_vpn_devices.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
supported_vpn_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/supportedvpndevices'} # type: ignore
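    # ----------------------------------------------------------------------
    # Hedged usage sketch (not part of the generated code): unlike the begin_*
    # methods, supported_vpn_devices is a plain synchronous call that returns
    # the XML document as a string. Names are illustrative assumptions.
    #
    #   xml_listing = client.virtual_network_gateways.supported_vpn_devices(
    #       "my-rg", "my-vnet-gateway")
    #   print(xml_listing)
    # ----------------------------------------------------------------------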
def _get_learned_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self._get_learned_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_learned_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
def begin_get_learned_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway has learned, including
routes learned from BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_learned_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_learned_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
def _get_advertised_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self._get_advertised_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_advertised_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
def begin_get_advertised_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway is advertising to the
specified peer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2017_10_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_advertised_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_advertised_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
def vpn_device_configuration_script(
self,
resource_group_name, # type: str
virtual_network_gateway_connection_name, # type: str
parameters, # type: "models.VpnDeviceScriptParameters"
**kwargs # type: Any
):
# type: (...) -> str
"""Gets a xml format representation for vpn device configuration script.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection for which the configuration script is generated.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the generate vpn device script operation.
:type parameters: ~azure.mgmt.network.v2017_10_01.models.VpnDeviceScriptParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: str, or the result of cls(response)
:rtype: str
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self.vpn_device_configuration_script.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnDeviceScriptParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
vpn_device_configuration_script.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/vpndeviceconfigurationscript'} # type: ignore
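# --- Hedged usage sketch (not part of the generated operations class above).
# It assumes the standard azure-identity / NetworkManagementClient wiring; the
# subscription id, resource group and gateway names are placeholders.
def _example_get_learned_routes():  # pragma: no cover
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.network import NetworkManagementClient
    client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
    # begin_* methods defined above return an LROPoller; .result() blocks until
    # the POST long-running operation completes and returns the deserialized model.
    poller = client.virtual_network_gateways.begin_get_learned_routes(
        resource_group_name="example-rg",
        virtual_network_gateway_name="example-gw",
    )
    routes = poller.result()  # GatewayRouteListResult
    for route in routes.value or []:
        print(route.network, route.next_hop, route.origin)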
| 50.679923
| 248
| 0.673244
|
64da72d5286962e5dd983171f507cf09fe354986
| 4,736
|
py
|
Python
|
appBody.py
|
MichelVanderhulst/web-app-central
|
d572d10ff8d5b6cbf0a0fc191a397f004bce75cc
|
[
"MIT"
] | null | null | null |
appBody.py
|
MichelVanderhulst/web-app-central
|
d572d10ff8d5b6cbf0a0fc191a397f004bce75cc
|
[
"MIT"
] | null | null | null |
appBody.py
|
MichelVanderhulst/web-app-central
|
d572d10ff8d5b6cbf0a0fc191a397f004bce75cc
|
[
"MIT"
] | null | null | null |
# Dash app libraries
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import base64
import dash_table
# Making table quickly
import pandas as pd
# Listing manually all the apps created
derivatives = ["European option",
"European option",
"Asian option",
"Exchange option"]
models = ["Black-Scholes-Merton",
"Cox-Ross Rubinstein",
"Cox-Ross Rubinstein" ,
"Black-Scholes-Merton"]
URLs = [html.A(html.P("eu-option-bsm.herokuapp.com"),href="https://eu-option-bsm.herokuapp.com", target="_blank"),
html.A(html.P('eu-option-crr.herokuapp.com'),href="https://eu-option-crr.herokuapp.com", target="_blank"),
html.A(html.P("asian-option-crr.herokuapp.com"),href='https://asian-option-crr.herokuapp.com', target="_blank"),
html.A(html.P("exchange-option-bsm.herokuapp.com"),href='http://exchange-option-bsm.herokuapp.com', target="_blank")]
authors = ["Michel Vanderhulst",
"Michel Vanderhulst",
"Michel Vanderhulst",
"Michel Vanderhulst"]
# Last-updated dates would be a nice addition; one option would be to fetch the last commit date of each app from GitHub.
lastupdated = ["2021/01/04","2021/02/11","2021/02/11","2021/02/14"]
# Building the table from all apps' info
dictionary={"Derivative":derivatives,"Model":models,"URL":URLs,"Author":authors}
df=pd.DataFrame(dictionary)
# making Dash table out of pandas table
table=dbc.Table.from_dataframe(df, striped=True, bordered=True, hover=True)
# Creating the app body
def body():
return html.Div(children=[
html.Div(id='left-column', children=[
dcc.Tabs(
id='tabs', value='About this App',
children=[
dcc.Tab(
label='About this App',
value='About this App',
children=html.Div(children=[
html.Br(),
html.H4('What is this app?', style={"text-align":"center"}),
html.P(
"""
                            This app lists web applications for financial derivatives replication strategies. Their goal is to illustrate, through visuals, the investment strategies that replicate the derivatives' prices, i.e. to show that the prices are arbitrage-free.
"""
),
html.Br(),
html.P(
"""
                            Note: the apps are turned off by default. Upon startup, an app can take between 10 and 30 seconds to load; it will then run at full speed.
"""),
html.Br(),
html.Div(table)])
),
dcc.Tab(
label="Origin",
value="Origin",
children=[html.Div(children=[
html.Br(),
html.H4("Origin of apps and methodology", style={"text-align":"center"}),
html.P([
"""
                            The web applications were developed by Michel Vanderhulst in 2020/2021 for his Master's thesis, under the supervision of Prof. Frédéric Vrins at the Louvain School of Management.
                            The goal is for future students' theses to keep updating them and to add new derivatives' replication strategies.
"""]),
html.Br(),
html.P([
"""
                            The first four web apps are supported by the written Master's thesis. The full mathematical proofs and explanations, along with the applications' development (and a step-by-step methodology to build one), can be found there.
"""]),
html.Br(),
html.P(["The source code of the apps can be found at ", html.A("github.com/MichelVanderhulst",href="https://github.com/MichelVanderhulst?tab=repositories", target="_blank"),"."])
])]),
#
#
],),], style={"display":"flex", 'margin':"20px", 'transform':'translateX(+30%)', "width":"60%"}),
])
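# --- Hedged usage sketch (assumes the usual Dash entry point; the app object and
# stylesheet choice below are not part of this module) ---
if __name__ == "__main__":
    app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
    app.layout = html.Div([body()])
    app.run_server(debug=True)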
| 49.333333
| 261
| 0.5019
|
5f2755f38b02e01a2205b23ecbd4acd716b2fdcf
| 1,598
|
py
|
Python
|
var/spack/repos/builtin/packages/ioapi/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360
|
2017-11-06T08:47:01.000Z
|
2022-03-31T14:45:33.000Z
|
var/spack/repos/builtin/packages/ioapi/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838
|
2017-11-04T07:49:45.000Z
|
2022-03-31T23:38:39.000Z
|
var/spack/repos/builtin/packages/ioapi/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793
|
2017-11-04T07:45:50.000Z
|
2022-03-30T14:31:53.000Z
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack import *
class Ioapi(MakefilePackage):
"""Models-3/EDSS Input/Output Applications Programming Interface."""
homepage = "https://www.cmascenter.org/ioapi/"
url = "https://www.cmascenter.org/ioapi/download/ioapi-3.2.tar.gz"
version('3.2', sha256='0a3cbf236ffbd9fb5f6509e35308c3353f1f53096efe0c51b84883d2da86924b')
depends_on('netcdf-c@4:')
depends_on('netcdf-fortran@4:')
depends_on('sed', type='build')
def edit(self, spec, prefix):
# No default Makefile bundled; edit the template.
os.symlink('Makefile.template', 'Makefile')
# The makefile uses stubborn assignments of = instead of ?= so
        # edit the makefile instead of using environment variables.
makefile = FileFilter('Makefile')
makefile.filter('^BASEDIR.*', 'BASEDIR = ' + self.build_directory)
makefile.filter('^INSTALL.*', 'INSTALL = ' + prefix)
makefile.filter('^BININST.*', 'BININST = ' + prefix.bin)
makefile.filter('^LIBINST.*', 'LIBINST = ' + prefix.lib)
def install(self, spec, prefix):
make('install')
# Install the header files.
mkdirp(prefix.include.fixed132)
install('ioapi/*.EXT', prefix.include)
# Install the header files for CMAQ and SMOKE in the
# non-standard -ffixed-line-length-132 format.
install('ioapi/fixed_src/*.EXT', prefix.include.fixed132)
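# Hedged usage sketch: with this recipe on the Spack repo path, the package would
# typically be built and made available as follows (shell commands shown as
# comments; the exact spec constraints are illustrative):
#   spack install ioapi@3.2
#   spack load ioapi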
| 42.052632
| 93
| 0.670839
|
4cef8ebf9a319a43e6bef137aa8a124213e3a71c
| 165
|
py
|
Python
|
venv/bin/django-admin.py
|
justinburger/dinetta
|
a7cb9abd67167ee1be2cc4417e650ee1aae1afe3
|
[
"MIT"
] | null | null | null |
venv/bin/django-admin.py
|
justinburger/dinetta
|
a7cb9abd67167ee1be2cc4417e650ee1aae1afe3
|
[
"MIT"
] | 6
|
2020-06-05T23:19:48.000Z
|
2022-02-10T13:39:54.000Z
|
venv/bin/django-admin.py
|
justinburger/dinetta
|
a7cb9abd67167ee1be2cc4417e650ee1aae1afe3
|
[
"MIT"
] | null | null | null |
#!/Users/justinburger/Documents/dinetta/venv/bin/python3.7
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| 27.5
| 58
| 0.793939
|
90696b9ffc09244e6c8e1e5b4e1e7f6749eead64
| 2,342
|
py
|
Python
|
test/integration/tests/model_mapper/constant_tuple_then_float.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
test/integration/tests/model_mapper/constant_tuple_then_float.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
test/integration/tests/model_mapper/constant_tuple_then_float.py
|
AshKelly/PyAutoLens
|
043795966338a655339e61782253ad67cc3c14e6
|
[
"MIT"
] | null | null | null |
import os
from autofit import conf
from autofit.optimize import non_linear as nl
from autolens.data import ccd
from autolens.model.galaxy import galaxy, galaxy_model as gm
from autolens.pipeline import phase as ph
from autolens.pipeline import pipeline as pl
from autolens.model.profiles import light_profiles as lp
from test.integration import tools
test_type = 'model_mapper'
test_name = "constant_tuple_then_float"
path = '{}/../../'.format(os.path.dirname(os.path.realpath(__file__)))
output_path = path+'output/'+test_type
config_path = path+'config'
conf.instance = conf.Config(config_path=config_path, output_path=output_path)
def pipeline():
sersic = lp.EllipticalSersic(centre=(0.0, 0.0), axis_ratio=0.8, phi=90.0, intensity=1.0, effective_radius=1.3,
sersic_index=3.0)
lens_galaxy = galaxy.Galaxy(light_profile=sersic)
tools.reset_paths(test_name=test_name, output_path=output_path)
tools.simulate_integration_image(test_name=test_name, pixel_scale=0.1, lens_galaxies=[lens_galaxy],
source_galaxies=[], target_signal_to_noise=30.0)
ccd_data = ccd.load_ccd_data_from_fits(image_path=path + '/data/' + test_name + '/image.fits',
psf_path=path + '/data/' + test_name + '/psf.fits',
noise_map_path=path + '/data/' + test_name + '/noise_map.fits',
pixel_scale=0.1)
pipeline = make_pipeline(test_name=test_name)
pipeline.run(data=ccd_data)
def make_pipeline(test_name):
class MMPhase(ph.LensPlanePhase):
def pass_priors(self, previous_results):
self.lens_galaxies.lens.light.centre_0 = 1.0
self.lens_galaxies.lens.light.centre_1 = 2.0
self.lens_galaxies.lens.light.axis_ratio = 0.2
self.lens_galaxies.lens.light.phi = 90.0
phase1 = MMPhase(lens_galaxies=dict(lens=gm.GalaxyModel(light=lp.EllipticalSersic)),
optimizer_class=nl.MultiNest, phase_name="{}/phase1".format(test_name))
phase1.optimizer.const_efficiency_mode = True
phase1.optimizer.n_live_points = 20
phase1.optimizer.sampling_efficiency = 0.8
return pl.PipelineImaging(test_name, phase1)
if __name__ == "__main__":
pipeline()
| 37.174603
| 114
| 0.678907
|
83e13c5d67ad251c04924f0fb00a156bcc93dd07
| 3,622
|
py
|
Python
|
src/feeds/views.py
|
Talengi/phase
|
60ff6f37778971ae356c5b2b20e0d174a8288bfe
|
[
"MIT"
] | 8
|
2016-01-29T11:53:40.000Z
|
2020-03-02T22:42:02.000Z
|
src/feeds/views.py
|
Talengi/phase
|
60ff6f37778971ae356c5b2b20e0d174a8288bfe
|
[
"MIT"
] | 289
|
2015-03-23T07:42:52.000Z
|
2022-03-11T23:26:10.000Z
|
src/feeds/views.py
|
Talengi/phase
|
60ff6f37778971ae356c5b2b20e0d174a8288bfe
|
[
"MIT"
] | 7
|
2015-12-08T09:03:20.000Z
|
2020-05-11T15:36:51.000Z
|
# -*- coding: utf-8 -*-
from django.views.generic import TemplateView, ListView
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured
from django.contrib.auth.mixins import LoginRequiredMixin
from categories.views import CategoryMixin
from feeds import feeds
class AlertHome(LoginRequiredMixin, CategoryMixin, TemplateView):
"""Simply links to available feeds."""
template_name = 'feeds/alert_home.html'
def breadcrumb_section(self):
return (_('Feeds'), '#')
def breadcrumb_subsection(self):
return (self.category, reverse('category_feeds', args=[
self.category.organisation.slug,
self.category.slug
]))
class FeedConverterMixin(object):
"""Displays a Django Feed directly in html."""
feed_class = None
def dispatch(self, request, *args, **kwargs):
self.extract_feed()
return super(FeedConverterMixin, self).dispatch(request, *args, **kwargs)
def extract_feed(self):
"""Get the feed to display."""
if self.feed_class is None:
raise ImproperlyConfigured('Missing `feed` field')
feed = self.feed_class()
feed.populate(self.request, **self.kwargs)
self.category = feed.category
feed_object = feed.get_object(self.request, *self.args, **self.kwargs)
rss_feed = feed.get_feed(feed_object, self.request)
self.feed = rss_feed
class BaseAlert(LoginRequiredMixin,
FeedConverterMixin,
ListView):
template_name = 'feeds/alert_list.html'
context_object_name = 'alerts'
def breadcrumb_section(self):
return (_('Feeds'), '#')
def breadcrumb_subsection(self):
return (self.category, reverse('category_feeds', args=[
self.category.organisation.slug,
self.category.slug
]))
def get_queryset(self):
items = self.feed.items
return items
def get_context_data(self, **kwargs):
context = super(BaseAlert, self).get_context_data(**kwargs)
context.update({
'title': self.feed.feed['title'],
'description': self.feed.feed['description'],
'feed_url': self.feed.feed['link'],
})
return context
class AlertNewDocuments(BaseAlert):
"""List newly created documents."""
feed_class = feeds.FeedNewDocuments
def breadcrumb_object(self):
return (_('New documents'), reverse('alert_new_documents', args=[
self.category.organisation.slug,
self.category.slug
]))
class AlertClosedReviews(BaseAlert):
"""List newly closed reviews."""
feed_class = feeds.FeedClosedReviews
def breadcrumb_object(self):
return (_('Closed reviews'), reverse('alert_closed_reviews', args=[
self.category.organisation.slug,
self.category.slug
]))
class AlertStartedReviews(BaseAlert):
"""List newly created reviews."""
feed_class = feeds.FeedStartedReviews
def breadcrumb_object(self):
return (_('Started reviews'), reverse('alert_started_reviews', args=[
self.category.organisation.slug,
self.category.slug
]))
class AlertOverdueDocuments(BaseAlert):
"""List overdue documents."""
feed_class = feeds.FeedOverdueDocuments
def breadcrumb_object(self):
return (_('Overdue documents'), reverse('alert_overdue_documents', args=[
self.category.organisation.slug,
self.category.slug
]))
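# --- Hedged wiring sketch (belongs in a urls.py, not in this module; the route
# strings and kwarg names are assumptions, only the view classes and URL names
# come from the code above) ---
# from django.conf.urls import url
# from feeds import views
# urlpatterns = [
#     url(r'^(?P<organisation>[\w-]+)/(?P<category>[\w-]+)/alerts/new-documents/$',
#         views.AlertNewDocuments.as_view(), name='alert_new_documents'),
#     url(r'^(?P<organisation>[\w-]+)/(?P<category>[\w-]+)/alerts/closed-reviews/$',
#         views.AlertClosedReviews.as_view(), name='alert_closed_reviews'),
# ]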
| 29.933884
| 81
| 0.655163
|
ee49de9ea14a8c6f8f1bfd48d4bf681e710b0d97
| 12,004
|
py
|
Python
|
src/algorithms/modules.py
|
jangirrishabh/look-closer
|
91647b429a5e8f85fe29079035d4f51772df8382
|
[
"MIT"
] | 15
|
2022-01-20T01:04:35.000Z
|
2022-01-29T09:30:50.000Z
|
src/algorithms/modules.py
|
jangirrishabh/look-closer
|
91647b429a5e8f85fe29079035d4f51772df8382
|
[
"MIT"
] | 1
|
2022-03-24T23:44:11.000Z
|
2022-03-27T21:59:24.000Z
|
src/algorithms/modules.py
|
jangirrishabh/look-closer
|
91647b429a5e8f85fe29079035d4f51772df8382
|
[
"MIT"
] | 1
|
2022-01-29T09:30:50.000Z
|
2022-01-29T09:30:50.000Z
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
from functools import partial
def _get_out_shape(in_shape, layers, attn=False):
x = torch.randn(*in_shape).unsqueeze(0)
if attn:
return layers(x, x, x).squeeze(0).shape
else:
return layers(x).squeeze(0).shape
def gaussian_logprob(noise, log_std):
"""Compute Gaussian log probability"""
residual = (-0.5 * noise.pow(2) - log_std).sum(-1, keepdim=True)
return residual - 0.5 * np.log(2 * np.pi) * noise.size(-1)
def squash(mu, pi, log_pi):
"""Apply squashing function, see appendix C from https://arxiv.org/pdf/1812.05905.pdf"""
mu = torch.tanh(mu)
if pi is not None:
pi = torch.tanh(pi)
if log_pi is not None:
log_pi -= torch.log(F.relu(1 - pi.pow(2)) + 1e-6).sum(-1, keepdim=True)
return mu, pi, log_pi
def orthogonal_init(m):
if isinstance(m, nn.Linear):
nn.init.orthogonal_(m.weight.data)
if hasattr(m.bias, 'data'):
m.bias.data.fill_(0.0)
elif isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
gain = nn.init.calculate_gain('relu')
nn.init.orthogonal_(m.weight.data, gain)
if hasattr(m.bias, 'data'):
m.bias.data.fill_(0.0)
class NormalizeImg(nn.Module):
def __init__(self, mean_zero=False):
super().__init__()
self.mean_zero = mean_zero
def forward(self, x):
if self.mean_zero:
return x/255. - 0.5
return x/255.
class Flatten(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x):
return x.view(x.size(0), -1)
class Identity(nn.Module):
def __init__(self, obs_shape=None, out_dim=None):
super().__init__()
self.out_shape = obs_shape
self.out_dim = out_dim
def forward(self, x):
return x
class RandomShiftsAug(nn.Module):
def __init__(self, pad):
super().__init__()
self.pad = pad
def forward(self, x):
n, c, h, w = x.size()
assert h == w
padding = tuple([self.pad] * 4)
x = F.pad(x, padding, 'replicate')
eps = 1.0 / (h + 2 * self.pad)
arange = torch.linspace(-1.0 + eps,
1.0 - eps,
h + 2 * self.pad,
device=x.device,
dtype=x.dtype)[:h]
arange = arange.unsqueeze(0).repeat(h, 1).unsqueeze(2)
base_grid = torch.cat([arange, arange.transpose(1, 0)], dim=2)
base_grid = base_grid.unsqueeze(0).repeat(n, 1, 1, 1)
shift = torch.randint(0,
2 * self.pad + 1,
size=(n, 1, 1, 2),
device=x.device,
dtype=x.dtype)
shift *= 2.0 / (h + 2 * self.pad)
grid = base_grid + shift
return F.grid_sample(x,
grid,
padding_mode='zeros',
align_corners=False)
class SelfAttention(nn.Module):
def __init__(self, in_channels):
super().__init__()
self.conv_query = nn.Conv2d(in_channels=in_channels, out_channels=in_channels, kernel_size=1)
self.conv_key = nn.Conv2d(in_channels=in_channels, out_channels=in_channels, kernel_size=1)
self.conv_value = nn.Conv2d(in_channels=in_channels, out_channels=in_channels, kernel_size=1)
self.in_channels = in_channels
def forward(self, query, key, value):
N, C, H, W = query.shape
assert query.shape == key.shape == value.shape, "Key, query and value inputs must be of the same dimensions in this implementation"
q = self.conv_query(query).reshape(N, C, H*W)#.permute(0, 2, 1)
k = self.conv_key(key).reshape(N, C, H*W)#.permute(0, 2, 1)
v = self.conv_value(value).reshape(N, C, H*W)#.permute(0, 2, 1)
attention = k.transpose(1, 2)@q / C**0.5
attention = attention.softmax(dim=1)
output = v@attention
output = output.reshape(N, C, H, W)
return query + output # Add with query and output
class AttentionBlock(nn.Module):
def __init__(self, dim, num_heads=1, qkv_bias=False, qk_scale=None, norm_layer=nn.LayerNorm, contextualReasoning=False):
super().__init__()
self.norm1 = norm_layer(dim)
self.norm2 = norm_layer(dim)
self.norm3 = norm_layer(dim)
self.attn = SelfAttention(dim[0])
self.context = contextualReasoning
temp_shape = _get_out_shape(dim, self.attn, attn=True)
self.out_shape = _get_out_shape(temp_shape, nn.Flatten())
self.apply(orthogonal_init)
def forward(self, query, key, value):
x = self.attn(self.norm1(query), self.norm2(key), self.norm3(value))
if self.context:
return x
else:
x = x.flatten(start_dim=1)
return x
class SharedCNN(nn.Module):
def __init__(self, obs_shape, num_layers=11, num_filters=32, mean_zero=False):
super().__init__()
assert len(obs_shape) == 3
self.num_layers = num_layers
self.num_filters = num_filters
self.layers = [NormalizeImg(mean_zero), nn.Conv2d(obs_shape[0], num_filters, 3, stride=2)]
for _ in range(1, num_layers):
self.layers.append(nn.ReLU())
self.layers.append(nn.Conv2d(num_filters, num_filters, 3, stride=1))
self.layers = nn.Sequential(*self.layers)
self.out_shape = _get_out_shape(obs_shape, self.layers)
self.apply(orthogonal_init)
def forward(self, x):
return self.layers(x)
class HeadCNN(nn.Module):
def __init__(self, in_shape, num_layers=0, num_filters=32, flatten=True):
super().__init__()
self.layers = []
for _ in range(0, num_layers):
self.layers.append(nn.ReLU())
self.layers.append(nn.Conv2d(num_filters, num_filters, 3, stride=1))
if flatten:
self.layers.append(Flatten())
self.layers = nn.Sequential(*self.layers)
self.out_shape = _get_out_shape(in_shape, self.layers)
self.apply(orthogonal_init)
def forward(self, x):
return self.layers(x)
class Integrator(nn.Module):
def __init__(self, in_shape_1, in_shape_2, num_filters=32, concatenate=True):
super().__init__()
self.relu = nn.ReLU()
if concatenate:
self.conv1 = nn.Conv2d(in_shape_1[0]+in_shape_2[0], num_filters, (1,1))
else:
self.conv1 = nn.Conv2d(in_shape_1[0], num_filters, (1,1))
self.apply(orthogonal_init)
def forward(self, x):
x = self.conv1(self.relu(x))
return x
class Encoder(nn.Module):
def __init__(self, shared_cnn, head_cnn, projection, attention=None):
super().__init__()
self.shared_cnn = shared_cnn
self.head_cnn = head_cnn
self.projection = projection
self.attention = attention
self.out_dim = projection.out_dim
def forward(self, x, detach=False):
x = self.shared_cnn(x)
x = self.head_cnn(x)
if detach:
x = x.detach()
x = self.projection(x)
return x
class Mlp(nn.Module):
def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
super().__init__()
out_features = out_features or in_features
hidden_features = hidden_features or in_features
self.fc1 = nn.Linear(in_features, hidden_features)
self.act = act_layer()
self.fc2 = nn.Linear(hidden_features, out_features)
self.drop = nn.Dropout(drop)
self.apply(orthogonal_init)
def forward(self, x):
x = self.fc1(x)
x = self.act(x)
x = self.drop(x)
x = self.fc2(x)
x = self.drop(x)
return x
def _init_weights(self):
nn.init.xavier_uniform_(self.fc1.weight)
nn.init.xavier_uniform_(self.fc2.weight)
nn.init.normal_(self.fc1.bias, std=1e-6)
nn.init.normal_(self.fc2.bias, std=1e-6)
class MultiViewEncoder(nn.Module):
def __init__(self, shared_cnn_1, shared_cnn_2, integrator, head_cnn, projection, attention1=None, attention2=None, mlp1=None, mlp2=None, norm1=None, norm2=None, concatenate=True, contextualReasoning1=False, contextualReasoning2=False):
super().__init__()
self.shared_cnn_1 = shared_cnn_1
self.shared_cnn_2 = shared_cnn_2
self.integrator = integrator
self.head_cnn = head_cnn
self.projection = projection
self.relu = nn.ReLU()
self.contextualReasoning1 = contextualReasoning1
self.contextualReasoning2 = contextualReasoning2
self.attention1 = attention1
self.attention2 = attention2
self.mlp1 = mlp1
self.norm1 = norm1
self.mlp2 = mlp2
self.norm2 = norm2
self.out_dim = projection.out_dim
self.concatenate = concatenate
def forward(self, x1, x2, detach=False):
x1 = self.shared_cnn_1(x1) #3rd Person
x2 = self.shared_cnn_2(x2)
B, C, H, W = x1.shape
if self.contextualReasoning1:
x1 = self.attention1(x1, x2, x2) # Contextual reasoning on 3rd person image based on 1st person image
x1 = self.norm1(x1)
x1 = x1.view(B, C, -1).permute(0, 2, 1)
x1 = self.mlp1(x1).permute(0, 2, 1).contiguous().view(B, C, H, W)
if self.contextualReasoning2:
x2 = self.attention2(x2, x1, x1) # Contextual reasoning on 1st person image based on 3rd person image
x2 = self.norm2(x2)
x2 = x2.view(B, C, -1).permute(0, 2, 1)
x2 = self.mlp2(x2).permute(0, 2, 1).contiguous().view(B, C, H, W)
if self.concatenate:
# Concatenate features along channel dimension
x = torch.cat((x1, x2), dim=1) # 1, 64, 21, 21
else:
x = x1 + x2 # 1, 32, 21, 21
x = self.integrator(x)
x = self.head_cnn(x)
if self.attention1 is not None and not self.contextualReasoning1:
x = self.relu(self.attention1(x, x, x))
if detach:
x = x.detach()
x = self.projection(x)
return x
class Actor(nn.Module):
def __init__(self, out_dim, projection_dim, state_shape, action_shape, hidden_dim, hidden_dim_state, log_std_min, log_std_max):
super().__init__()
self.log_std_min = log_std_min
self.log_std_max = log_std_max
self.trunk = nn.Sequential(nn.Linear(out_dim, projection_dim),
nn.LayerNorm(projection_dim), nn.Tanh())
self.layers = nn.Sequential(
nn.Linear(projection_dim, hidden_dim), nn.ReLU(inplace=True),
nn.Linear(hidden_dim, hidden_dim), nn.ReLU(inplace=True),
nn.Linear(hidden_dim, 2 * action_shape[0])
)
if state_shape:
self.state_encoder = nn.Sequential(nn.Linear(state_shape[0], hidden_dim_state),
nn.ReLU(inplace=True),
nn.Linear(hidden_dim_state, projection_dim),
nn.LayerNorm(projection_dim), nn.Tanh())
else:
self.state_encoder = None
self.apply(orthogonal_init)
def forward(self, x, state, compute_pi=True, compute_log_pi=True):
x = self.trunk(x)
if self.state_encoder:
x = x + self.state_encoder(state)
mu, log_std = self.layers(x).chunk(2, dim=-1)
log_std = torch.tanh(log_std)
log_std = self.log_std_min + 0.5 * (
self.log_std_max - self.log_std_min
) * (log_std + 1)
if compute_pi:
std = log_std.exp()
noise = torch.randn_like(mu)
pi = mu + noise * std
else:
pi = None
entropy = None
if compute_log_pi:
log_pi = gaussian_logprob(noise, log_std)
else:
log_pi = None
mu, pi, log_pi = squash(mu, pi, log_pi)
return mu, pi, log_pi, log_std
class Critic(nn.Module):
def __init__(self, out_dim, projection_dim, state_shape, action_shape, hidden_dim, hidden_dim_state):
super().__init__()
self.projection = nn.Sequential(nn.Linear(out_dim, projection_dim),
nn.LayerNorm(projection_dim), nn.Tanh())
if state_shape:
self.state_encoder = nn.Sequential(nn.Linear(state_shape[0], hidden_dim_state),
nn.ReLU(inplace=True),
nn.Linear(hidden_dim_state, projection_dim),
nn.LayerNorm(projection_dim), nn.Tanh())
else:
self.state_encoder = None
self.Q1 = nn.Sequential(
nn.Linear(projection_dim + action_shape[0], hidden_dim),
nn.ReLU(inplace=True), nn.Linear(hidden_dim, hidden_dim),
nn.ReLU(inplace=True), nn.Linear(hidden_dim, 1))
self.Q2 = nn.Sequential(
nn.Linear(projection_dim + action_shape[0], hidden_dim),
nn.ReLU(inplace=True), nn.Linear(hidden_dim, hidden_dim),
nn.ReLU(inplace=True), nn.Linear(hidden_dim, 1))
self.apply(orthogonal_init)
def forward(self, obs, state, action):
obs = self.projection(obs)
if self.state_encoder:
obs = obs + self.state_encoder(state)
h = torch.cat([obs, action], dim=-1)
return self.Q1(h), self.Q2(h)
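# --- Hedged usage sketch (shapes and hyper-parameters below are illustrative
# assumptions, not values taken from the training configuration) ---
if __name__ == "__main__":
    obs_shape = (9, 84, 84)  # e.g. three stacked RGB frames
    shared = SharedCNN(obs_shape, num_layers=11, num_filters=32)
    head = HeadCNN(shared.out_shape, num_layers=0, num_filters=32)
    projection = Identity(out_dim=head.out_shape[0])
    encoder = Encoder(shared, head, projection)
    aug = RandomShiftsAug(pad=4)
    obs = torch.rand(8, *obs_shape) * 255.0  # fake image batch in [0, 255]
    features = encoder(aug(obs))
    print(features.shape)  # e.g. torch.Size([8, 14112]) for 84x84 inputs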
| 30.544529
| 236
| 0.671276
|
6f7ccfcb3a8bade3319722a0ddd6d4729eebdb44
| 135
|
py
|
Python
|
skeleton/test_unittest.py
|
cjhnim/daily-kata-python
|
44a4ba9583b9f8c9db8e0114fa4837acbbd83746
|
[
"MIT"
] | null | null | null |
skeleton/test_unittest.py
|
cjhnim/daily-kata-python
|
44a4ba9583b9f8c9db8e0114fa4837acbbd83746
|
[
"MIT"
] | null | null | null |
skeleton/test_unittest.py
|
cjhnim/daily-kata-python
|
44a4ba9583b9f8c9db8e0114fa4837acbbd83746
|
[
"MIT"
] | null | null | null |
import inc_dec
def test_increment():
assert inc_dec.increment(3) == 4
def test_decrement():
assert inc_dec.decrement(3) == 2
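# --- Hedged sketch of the module under test (inc_dec.py is not included here;
# the implementation below is an assumption that satisfies the asserts above) ---
# def increment(x):
#     return x + 1
# def decrement(x):
#     return x - 1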
| 16.875
| 36
| 0.703704
|
fd3a4d6b8569e760378a3b44a64e6eed5c4f8f84
| 842
|
py
|
Python
|
setup.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | null | null | null |
setup.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | 1
|
2020-02-18T05:11:20.000Z
|
2020-02-18T05:29:10.000Z
|
setup.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="EVE Incursion waitlist",
version="1.7.4",
author="SpeedProg",
author_email="speedprogde@googlemail.com",
description="Waitlist geared towards EveOnline Incursion Groups",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/SpeedProg/eve-inc-waitlist",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points="""
[console_scripts]
waitlist = waitlist.entry:main
[babel.extractors]
waitlist_themes = waitlist.utility.babel.themes_extractor:extract
"""
)
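# Hedged usage sketch (shell commands shown as comments; they assume a standard
# editable install of this package):
#   pip install -e .
#   waitlist          # console script declared above, runs waitlist.entry:main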
| 28.066667
| 69
| 0.685273
|
594a5da50c8db4eaefad53ea4868473960b16db6
| 1,153
|
py
|
Python
|
var/spack/repos/builtin/packages/r-powerlaw/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11
|
2015-10-04T02:17:46.000Z
|
2018-02-07T18:23:00.000Z
|
var/spack/repos/builtin/packages/r-powerlaw/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 22
|
2017-08-01T22:45:10.000Z
|
2022-03-10T07:46:31.000Z
|
var/spack/repos/builtin/packages/r-powerlaw/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 4
|
2016-06-10T17:57:39.000Z
|
2018-09-11T04:59:38.000Z
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RPowerlaw(RPackage):
"""Analysis of Heavy Tailed Distributions.
An implementation of maximum likelihood estimators for a variety of heavy
tailed distributions, including both the discrete and continuous power law
distributions. Additionally, a goodness-of-fit based approach is used to
estimate the lower cut-off for the scaling region."""
cran = "poweRlaw"
version('0.70.6', sha256='efc091449c5c6494c1c13c85a8eb95625d1c55ffffebe86c7ea16e4abbafa191')
version('0.70.2', sha256='240f1454389b1a00ad483fb63e5b53243cc9367f21a3e7253ab2c293673459ab')
version('0.70.1', sha256='15b1b8dadeb550c01b9f1308cfa64720be6fbf56afb80f6a096987d6a0055913')
depends_on('r@3.1.0:', type=('build', 'run'))
depends_on('r@3.4.0:', type=('build', 'run'), when='@0.70.2:')
depends_on('r-pracma', type=('build', 'run'), when='@0.70.6:')
depends_on('r-vgam', type=('build', 'run'), when='@:0.70.2')
| 41.178571
| 96
| 0.728534
|
a234a929fe189de6ef8d3b715afa44339aa650c5
| 2,521
|
py
|
Python
|
verification/testcases/functional_testcases/test_individual_verification.py
|
DhivakharVenkatachalam/snet-marketplace-service
|
6aee606bc9b00d418caeae26c64deae03792e0ce
|
[
"MIT"
] | 14
|
2019-02-12T09:14:52.000Z
|
2021-03-11T18:42:22.000Z
|
verification/testcases/functional_testcases/test_individual_verification.py
|
prashantramangupta/snet-marketplace-service
|
7c293054e4b0207deefecc46defd743c064472a4
|
[
"MIT"
] | 1,079
|
2019-01-10T04:31:24.000Z
|
2022-03-29T06:16:42.000Z
|
verification/testcases/functional_testcases/test_individual_verification.py
|
prashantramangupta/snet-marketplace-service
|
7c293054e4b0207deefecc46defd743c064472a4
|
[
"MIT"
] | 20
|
2018-12-18T13:06:41.000Z
|
2021-09-17T11:13:01.000Z
|
import json
from datetime import datetime
from unittest import TestCase
from unittest.mock import patch, Mock
from uuid import uuid4
from common.exceptions import MethodNotImplemented
from verification.application.handlers.verification_handlers import initiate, callback
from verification.application.services.verification_manager import verification_repository
from verification.constants import VerificationStatus, DUNSVerificationStatus
from verification.infrastructure.models import VerificationModel
class TestIndividualVerification(TestCase):
def test_initiate(self):
username = "karl@dummy.io"
event = {
"requestContext": {"authorizer": {"claims": {"email": username}}},
"body": json.dumps({
"type": "INDIVIDUAL"
})
}
initiate(event, None)
verification = verification_repository.session.query(VerificationModel) \
.filter(VerificationModel.entity_id == username) \
.order_by(VerificationModel.created_at.desc()).first()
if verification is None:
assert False
self.assertEqual(VerificationStatus.APPROVED.value, verification.status)
self.assertEqual(username, verification.entity_id)
@patch("common.boto_utils.BotoUtils", return_value=Mock(get_ssm_parameter=Mock(return_value="123"),
invoke_lambda=Mock(return_value={"statusCode": 201})))
def test_callback(self, mock_boto):
test_verification_id = "9f2c90119cb7424b8d69319ce211ddfc"
verification_type = "INDIVIDUAL"
org_uuid = uuid4().hex
username = "karl@dummy.io"
current_time = datetime.utcnow()
verification_repository.add_item(VerificationModel(
id=test_verification_id, verification_type=verification_type, entity_id=org_uuid, status="PENDING",
requestee=username, created_at=current_time, updated_at=current_time
))
event = {
"requestContext": {"authorizer": {"claims": {"email": username}}},
"queryStringParameters": {"verification_id": test_verification_id},
"body": json.dumps({
"verificationStatus": "APPROVED",
"reviewed_by": "admin@dummy.io",
"comment": "looks good"
})
}
self.assertRaises(Exception, callback, event, None)
def tearDown(self):
verification_repository.session.query(VerificationModel).delete()
verification_repository.session.commit()
| 42.016667
| 111
| 0.685442
|
a22913868525a9d0d6fd5a2eff0269a895e3b3b3
| 12,964
|
py
|
Python
|
test/sql/test_from_linter.py
|
petit87/sqlalchemy
|
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
|
[
"MIT"
] | 5,383
|
2018-11-27T07:34:03.000Z
|
2022-03-31T19:40:59.000Z
|
test/sql/test_from_linter.py
|
petit87/sqlalchemy
|
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
|
[
"MIT"
] | 2,719
|
2018-11-27T07:55:01.000Z
|
2022-03-31T22:09:44.000Z
|
test/sql/test_from_linter.py
|
petit87/sqlalchemy
|
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
|
[
"MIT"
] | 1,056
|
2015-01-03T00:30:17.000Z
|
2022-03-15T12:56:24.000Z
|
from sqlalchemy import Integer
from sqlalchemy import select
from sqlalchemy import sql
from sqlalchemy import testing
from sqlalchemy import true
from sqlalchemy.testing import config
from sqlalchemy.testing import engines
from sqlalchemy.testing import expect_warnings
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
def find_unmatching_froms(query, start=None):
compiled = query.compile(linting=sql.COLLECT_CARTESIAN_PRODUCTS)
return compiled.from_linter.lint(start)
class TestFindUnmatchingFroms(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata, Column("col_a", Integer, primary_key=True))
Table("table_b", metadata, Column("col_b", Integer, primary_key=True))
Table("table_c", metadata, Column("col_c", Integer, primary_key=True))
Table("table_d", metadata, Column("col_d", Integer, primary_key=True))
def setup_test(self):
self.a = self.tables.table_a
self.b = self.tables.table_b
self.c = self.tables.table_c
self.d = self.tables.table_d
def test_everything_is_connected(self):
query = (
select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.select_from(self.c)
.select_from(self.d)
.where(self.d.c.col_d == self.b.c.col_b)
.where(self.c.c.col_c == self.d.c.col_d)
.where(self.c.c.col_c == 5)
)
froms, start = find_unmatching_froms(query)
assert not froms
for start in self.a, self.b, self.c, self.d:
froms, start = find_unmatching_froms(query, start)
assert not froms
def test_plain_cartesian(self):
query = select(self.a).where(self.b.c.col_b == 5)
froms, start = find_unmatching_froms(query, self.a)
assert start == self.a
assert froms == {self.b}
froms, start = find_unmatching_froms(query, self.b)
assert start == self.b
assert froms == {self.a}
@testing.combinations(("lateral",), ("cartesian",), ("join",))
def test_lateral_subqueries(self, control):
"""
::
test=> create table a (id integer);
CREATE TABLE
test=> create table b (id integer);
CREATE TABLE
test=> insert into a(id) values (1), (2), (3);
INSERT 0 3
test=> insert into b(id) values (1), (2), (3);
INSERT 0 3
test=> select * from (select id from a) as a1,
lateral (select id from b where id=a1.id) as b1;
id | id
----+----
1 | 1
2 | 2
3 | 3
(3 rows)
"""
p1 = select(self.a).subquery()
p2 = select(self.b).where(self.b.c.col_b == p1.c.col_a).subquery()
if control == "lateral":
p2 = p2.lateral()
query = select(p1, p2)
if control == "join":
query = query.join_from(p1, p2, p1.c.col_a == p2.c.col_b)
froms, start = find_unmatching_froms(query, p1)
if control == "cartesian":
assert start is p1
assert froms == {p2}
else:
assert start is None
assert froms is None
froms, start = find_unmatching_froms(query, p2)
if control == "cartesian":
assert start is p2
assert froms == {p1}
else:
assert start is None
assert froms is None
def test_lateral_subqueries_w_joins(self):
p1 = select(self.a).subquery()
p2 = (
select(self.b)
.where(self.b.c.col_b == p1.c.col_a)
.subquery()
.lateral()
)
p3 = (
select(self.c)
.where(self.c.c.col_c == p1.c.col_a)
.subquery()
.lateral()
)
query = select(p1, p2, p3).join_from(p1, p2, true()).join(p3, true())
for p in (p1, p2, p3):
froms, start = find_unmatching_froms(query, p)
assert start is None
assert froms is None
def test_lateral_subqueries_ok_do_we_still_find_cartesians(self):
p1 = select(self.a).subquery()
p3 = select(self.a).subquery()
p2 = select(self.b).where(self.b.c.col_b == p3.c.col_a).subquery()
p2 = p2.lateral()
query = select(p1, p2, p3)
froms, start = find_unmatching_froms(query, p1)
assert start is p1
assert froms == {p2, p3}
froms, start = find_unmatching_froms(query, p2)
assert start is p2
assert froms == {p1}
froms, start = find_unmatching_froms(query, p3)
assert start is p3
assert froms == {p1}
def test_count_non_eq_comparison_operators(self):
query = select(self.a).where(self.a.c.col_a > self.b.c.col_b)
froms, start = find_unmatching_froms(query, self.a)
is_(start, None)
is_(froms, None)
def test_dont_count_non_comparison_operators(self):
query = select(self.a).where(self.a.c.col_a + self.b.c.col_b == 5)
froms, start = find_unmatching_froms(query, self.a)
assert start == self.a
assert froms == {self.b}
def test_disconnect_between_ab_cd(self):
query = (
select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.select_from(self.c)
.select_from(self.d)
.where(self.c.c.col_c == self.d.c.col_d)
.where(self.c.c.col_c == 5)
)
for start in self.a, self.b:
froms, start = find_unmatching_froms(query, start)
assert start == start
assert froms == {self.c, self.d}
for start in self.c, self.d:
froms, start = find_unmatching_froms(query, start)
assert start == start
assert froms == {self.a, self.b}
def test_c_and_d_both_disconnected(self):
query = (
select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.where(self.c.c.col_c == 5)
.where(self.d.c.col_d == 10)
)
for start in self.a, self.b:
froms, start = find_unmatching_froms(query, start)
assert start == start
assert froms == {self.c, self.d}
froms, start = find_unmatching_froms(query, self.c)
assert start == self.c
assert froms == {self.a, self.b, self.d}
froms, start = find_unmatching_froms(query, self.d)
assert start == self.d
assert froms == {self.a, self.b, self.c}
def test_now_connected(self):
query = (
select(self.a)
.select_from(self.a.join(self.b, self.a.c.col_a == self.b.c.col_b))
.select_from(self.c.join(self.d, self.c.c.col_c == self.d.c.col_d))
.where(self.c.c.col_c == self.b.c.col_b)
.where(self.c.c.col_c == 5)
.where(self.d.c.col_d == 10)
)
froms, start = find_unmatching_froms(query)
assert not froms
for start in self.a, self.b, self.c, self.d:
froms, start = find_unmatching_froms(query, start)
assert not froms
def test_disconnected_subquery(self):
subq = (
select(self.a).where(self.a.c.col_a == self.b.c.col_b).subquery()
)
stmt = select(self.c).select_from(subq)
froms, start = find_unmatching_froms(stmt, self.c)
assert start == self.c
assert froms == {subq}
froms, start = find_unmatching_froms(stmt, subq)
assert start == subq
assert froms == {self.c}
def test_now_connect_it(self):
subq = (
select(self.a).where(self.a.c.col_a == self.b.c.col_b).subquery()
)
stmt = (
select(self.c)
.select_from(subq)
.where(self.c.c.col_c == subq.c.col_a)
)
froms, start = find_unmatching_froms(stmt)
assert not froms
for start in self.c, subq:
froms, start = find_unmatching_froms(stmt, start)
assert not froms
def test_right_nested_join_without_issue(self):
query = select(self.a).select_from(
self.a.join(
self.b.join(self.c, self.b.c.col_b == self.c.c.col_c),
self.a.c.col_a == self.b.c.col_b,
)
)
froms, start = find_unmatching_froms(query)
assert not froms
for start in self.a, self.b, self.c:
froms, start = find_unmatching_froms(query, start)
assert not froms
def test_join_on_true(self):
# test that a join(a, b) counts a->b as an edge even if there isn't
# actually a join condition. this essentially allows a cartesian
# product to be added explicitly.
query = select(self.a).select_from(self.a.join(self.b, true()))
froms, start = find_unmatching_froms(query)
assert not froms
def test_join_on_true_muti_levels(self):
"""test #6886"""
# test that a join(a, b).join(c) counts b->c as an edge even if there
# isn't actually a join condition. this essentially allows a cartesian
# product to be added explicitly.
query = select(self.a, self.b, self.c).select_from(
self.a.join(self.b, true()).join(self.c, true())
)
froms, start = find_unmatching_froms(query)
assert not froms
def test_right_nested_join_with_an_issue(self):
query = (
select(self.a)
.select_from(
self.a.join(
self.b.join(self.c, self.b.c.col_b == self.c.c.col_c),
self.a.c.col_a == self.b.c.col_b,
)
)
.where(self.d.c.col_d == 5)
)
for start in self.a, self.b, self.c:
froms, start = find_unmatching_froms(query, start)
assert start == start
assert froms == {self.d}
froms, start = find_unmatching_froms(query, self.d)
assert start == self.d
assert froms == {self.a, self.b, self.c}
def test_no_froms(self):
query = select(1)
froms, start = find_unmatching_froms(query)
assert not froms
class TestLinter(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata, Column("col_a", Integer, primary_key=True))
Table("table_b", metadata, Column("col_b", Integer, primary_key=True))
@classmethod
def setup_bind(cls):
# from linting is enabled by default
return config.db
def test_noop_for_unhandled_objects(self):
with self.bind.connect() as conn:
conn.exec_driver_sql("SELECT 1;").fetchone()
def test_does_not_modify_query(self):
with self.bind.connect() as conn:
[result] = conn.execute(select(1)).fetchone()
assert result == 1
def test_warn_simple(self):
a, b = self.tables("table_a", "table_b")
query = select(a.c.col_a).where(b.c.col_b == 5)
with expect_warnings(
r"SELECT statement has a cartesian product between FROM "
r'element\(s\) "table_[ab]" '
r'and FROM element "table_[ba]"'
):
with self.bind.connect() as conn:
conn.execute(query)
def test_warn_anon_alias(self):
a, b = self.tables("table_a", "table_b")
b_alias = b.alias()
query = select(a.c.col_a).where(b_alias.c.col_b == 5)
with expect_warnings(
r"SELECT statement has a cartesian product between FROM "
r'element\(s\) "table_(?:a|b_1)" '
r'and FROM element "table_(?:a|b_1)"'
):
with self.bind.connect() as conn:
conn.execute(query)
def test_warn_anon_cte(self):
a, b = self.tables("table_a", "table_b")
b_cte = select(b).cte()
query = select(a.c.col_a).where(b_cte.c.col_b == 5)
with expect_warnings(
r"SELECT statement has a cartesian product between "
r"FROM element\(s\) "
r'"(?:anon_1|table_a)" '
r'and FROM element "(?:anon_1|table_a)"'
):
with self.bind.connect() as conn:
conn.execute(query)
def test_no_linting(self, metadata, connection):
eng = engines.testing_engine(
options={"enable_from_linting": False, "use_reaper": False}
)
eng.pool = self.bind.pool # needed for SQLite
a, b = self.tables("table_a", "table_b")
query = select(a.c.col_a).where(b.c.col_b == 5)
with eng.connect() as conn:
conn.execute(query)
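# --- Hedged stand-alone sketch (runs only when this file is executed directly;
# the in-memory table definitions below are illustrative and independent of the
# test fixtures above) ---
if __name__ == "__main__":
    from sqlalchemy import Column as CoreColumn, MetaData, Table as CoreTable
    md = MetaData()
    ta = CoreTable("table_a", md, CoreColumn("col_a", Integer, primary_key=True))
    tb = CoreTable("table_b", md, CoreColumn("col_b", Integer, primary_key=True))
    # Nothing ties table_a to table_b, so the linter reports a cartesian product.
    stmt = select(ta.c.col_a).where(tb.c.col_b == 5)
    froms, start = find_unmatching_froms(stmt)
    print(start, froms)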
| 32.820253
| 79
| 0.569809
|
eb458ce46b97dfd617058eb354f363c4005234bf
| 2,774
|
py
|
Python
|
users/views.py
|
tutorin-tech/tit-api
|
ad487f1ae245e8bca84c450b6e01615cc056eba2
|
[
"Apache-2.0"
] | null | null | null |
users/views.py
|
tutorin-tech/tit-api
|
ad487f1ae245e8bca84c450b6e01615cc056eba2
|
[
"Apache-2.0"
] | null | null | null |
users/views.py
|
tutorin-tech/tit-api
|
ad487f1ae245e8bca84c450b6e01615cc056eba2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Denis Gavrilyuk. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
"""A module that contains the class-based views related to the users application. """
from django.contrib.auth import get_user_model
from rest_framework import generics, mixins, permissions, status
from rest_framework.response import Response
from .models import Person
from .serializers import SetPasswordSerializer, SignUpSerializer, CurrentUserSerializer
User = get_user_model()
class SetPasswordView(mixins.UpdateModelMixin, generics.GenericAPIView):
"""Sets the password when the user confirms the account. """
queryset = User.objects.all()
permission_classes = (permissions.AllowAny, )
serializer_class = SetPasswordSerializer
def put(self, request, *args, **kwargs):
"""PUT-method for setting user password after signing up. """
serializer = self.get_serializer(data=request.data)
if serializer.is_valid():
person = Person.objects.get(
accountconfirmationtoken__token=serializer.validated_data['token']
)
self.kwargs['pk'] = person.user.id
return self.update(request, *args, **kwargs)
return Response(status=status.HTTP_400_BAD_REQUEST, data=serializer.errors)
def update(self, request, *args, **kwargs):
instance = self.get_object()
serializer = self.get_serializer(instance, data=request.data)
serializer.is_valid() # Fake, real validation is done in put method.
self.perform_update(serializer)
return Response(status=status.HTTP_200_OK)
class SignUpView(mixins.CreateModelMixin, generics.GenericAPIView):
"""Creates a User model instance. """
permission_classes = (permissions.AllowAny, )
serializer_class = SignUpSerializer
def post(self, request, *args, **kwargs):
"""POST-method for creating new user. """
return self.create(request, *args, **kwargs)
class WhoAmIView(generics.RetrieveAPIView):
"""Returns the name of the authenticated user the request is sent on behalf of. """
queryset = User.objects.all()
serializer_class = CurrentUserSerializer
permission_classes = (permissions.IsAuthenticated, )
def get_object(self):
return self.request.user
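# Illustrative sketch only (a hypothetical users/urls.py, not part of this module): one way
# the views above could be wired into URL routes. Route names and paths are assumptions.
#
# from django.urls import path
# from .views import SetPasswordView, SignUpView, WhoAmIView
#
# urlpatterns = [
#     path('signup/', SignUpView.as_view(), name='signup'),
#     path('set-password/', SetPasswordView.as_view(), name='set-password'),
#     path('whoami/', WhoAmIView.as_view(), name='whoami'),
# ]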
| 35.564103
| 87
| 0.721702
|
f5a064ae31803d80d625f717f4927f8ffc22e3ff
| 5,454
|
py
|
Python
|
amr_coref/coref/build_embeddings.py
|
bjascob/amr_coref
|
e062fcf39b3db82889a58e52fd2cfdb772a279dc
|
[
"MIT"
] | 1
|
2021-03-04T13:02:04.000Z
|
2021-03-04T13:02:04.000Z
|
amr_coref/coref/build_embeddings.py
|
bjascob/amr_coref
|
e062fcf39b3db82889a58e52fd2cfdb772a279dc
|
[
"MIT"
] | 1
|
2021-12-05T18:48:58.000Z
|
2021-12-05T18:48:58.000Z
|
amr_coref/coref/build_embeddings.py
|
bjascob/amr_coref
|
e062fcf39b3db82889a58e52fd2cfdb772a279dc
|
[
"MIT"
] | null | null | null |
import re
import difflib
from tqdm import tqdm
from multiprocessing import Pool
from .vocab_embeddings import normalize_token
def build_embeddings(embed_in_dict, gdata_dict):
# Get all tokens from the serialized graph and normalize them
    # normalize_token is called by the Vocab class before getting the embedding index.
token_set = set()
for gdata in gdata_dict.values():
tokens = gdata['sgraph'].split()
token_set.update([normalize_token(t) for t in tokens])
    token_set.discard(None)  # normalize can return None for empty tokens
# Put everything in sets and print some stats
embed_in_set = set(embed_in_dict)
missing_set = token_set - embed_in_set
common_set = token_set & embed_in_set
    print('There are {:,} total tokens in the embedding set'.format(len(embed_in_set)))
print('There are {:,} total tokens in common'.format(len(common_set)))
print('There are {:,} missing initially'.format(len(missing_set)))
# Add some known translation for edge tokens
proxy_dict0 = {}
proxy_dict0, missing_set = edge_match(missing_set, embed_in_set)
print('There are {:,} missing after edge matching'.format(len(missing_set)))
# Now do some more fuzzy matching to try to map unknown simple attribs to tokens in the embedding file
proxy_dict1, missing_set = simple_match(missing_set, embed_in_set)
print('There are {:,} missing after simple matching'.format(len(missing_set)))
proxy_dict2, missing_set = fuzzy_match(missing_set, embed_in_set)
print('There are {:,} missing after difflib matching'.format(len(missing_set)))
# Combine the proxy dictionaries
proxy_dict = {**proxy_dict0, **proxy_dict1, **proxy_dict2}
final_embed_set = common_set
# Add in all the GloVe tokens that are needed as vectors for proxy token
final_embed_set.update(proxy_dict.values())
# Sanity check
for token in sorted(token_set):
if token not in final_embed_set and token not in missing_set:
assert token in proxy_dict
print('There are {:,} final embedding tokens'.format(len(final_embed_set)))
    # Filter the original embedding dict for words in the new vocabulary
embed_out_dict = {k:v for k, v in embed_in_dict.items() if k in final_embed_set}
    # Copy (duplicate) existing embedding vectors to the proxy names in proxy_dict
for proxy_token, glove_token in proxy_dict.items():
embed_out_dict[proxy_token] = embed_out_dict[glove_token]
    print('There are {:,} tokens after applying proxies'.format(len(embed_out_dict)))
# For debug
if 1:
pdfn = '/tmp/proxy_dict.txt'
print('Debug proxy dict written to', pdfn)
with open(pdfn, 'w') as f:
for k, v in sorted(proxy_dict.items()):
f.write('%-20s : %s\n' % (k, v))
return embed_out_dict
# Translate opX and argX to op and arg, which are in the embeddings
re_op = re.compile(r'^op\d+$')
re_arg = re.compile(r'^arg\d+$')
def edge_match(missing_set, embed_set):
missing_set = missing_set.copy()
proxy_dict = {}
for token in sorted(missing_set):
if re_op.search(token):
proxy_dict[token] = 'op'
missing_set.remove(token)
elif re_arg.search(token):
proxy_dict[token] = 'arg'
missing_set.remove(token)
return proxy_dict, missing_set
# Do some simple matching
def simple_match(missing_set, embed_set):
missing_set = missing_set.copy()
proxy_dict = {}
for token in sorted(missing_set):
# check for integers and replace with something known to be in the set
# The original embeddings have 0,1,.. so the only integers missing are
# larger values.
if token.isnumeric():
proxy_dict[token] = '1000'
missing_set.remove(token)
assert '10' in embed_set
# Replace words with dashes with a partial word
elif '-' in token:
for test in token.split('-'):
if test in embed_set:
proxy_dict[token] = test
missing_set.remove(token)
break
# Replace words with underscores with a partial word
elif '_' in token:
for test in token.split('_'):
if test in embed_set:
proxy_dict[token] = test
missing_set.remove(token)
break
return proxy_dict, missing_set
# Do more expensive partial string matching
def fuzzy_match(missing_set, embed_set):
missing_set = missing_set.copy()
proxy_dict = {}
if not missing_set:
return proxy_dict, missing_set
# Do multiprocessing matching
global g_embed_set
g_embed_set = embed_set
missing_list = sorted(missing_set)
with Pool() as pool:
for i, proxy_token in enumerate(pool.imap(difflib_worker, missing_list, chunksize=1)):
if proxy_token is not None:
assert proxy_token in embed_set
token = missing_list[i]
proxy_dict[token] = proxy_token
missing_set.remove(token)
return proxy_dict, missing_set
# Worker function to run difflib matching in multiprocessing pool
g_embed_set = None
def difflib_worker(token):
global g_embed_set
matches = difflib.get_close_matches(token, g_embed_set, n=1, cutoff=0.6)
if len(matches) > 0:
return matches[0]
else:
return None
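# Illustrative sketch only (toy data, not shipped with the project): build_embeddings()
# expects a token -> vector mapping plus a dict whose values hold a serialized graph under
# the 'sgraph' key. The names and vectors below are made-up placeholders.
#
# embed_in_dict = {'dog': [0.1, 0.2], 'cat': [0.3, 0.4], 'op': [0.0, 0.1], 'arg': [0.1, 0.0]}
# gdata_dict = {'doc_1': {'sgraph': 'dog op1 cat'}}
# embed_out_dict = build_embeddings(embed_in_dict, gdata_dict)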
| 41.318182
| 106
| 0.666667
|
ceede86b847a6ddb94b441b824113bb98e016f25
| 1,554
|
py
|
Python
|
Chapter03/traffic.py
|
marcjour303/PytML
|
cd1391976167a7a671e98a1f588898c01585cee9
|
[
"MIT"
] | 36
|
2019-04-05T00:58:57.000Z
|
2022-03-12T09:25:04.000Z
|
Chapter03/traffic.py
|
ClauPorto/Python-Machine-Learning-Cookbook-Second-Edition
|
99d8b799dbfe1d9a82f0bcc3648aaeb147b7298f
|
[
"MIT"
] | null | null | null |
Chapter03/traffic.py
|
ClauPorto/Python-Machine-Learning-Cookbook-Second-Edition
|
99d8b799dbfe1d9a82f0bcc3648aaeb147b7298f
|
[
"MIT"
] | 37
|
2019-04-16T00:50:20.000Z
|
2022-02-28T18:14:41.000Z
|
# SVM regressor to estimate traffic
import numpy as np
from sklearn import preprocessing
from sklearn.svm import SVR
input_file = 'traffic_data.txt'
# Reading the data
X = []
count = 0
with open(input_file, 'r') as f:
for line in f.readlines():
data = line[:-1].split(',')
X.append(data)
X = np.array(X)
# Convert string data to numerical data
label_encoder = []
X_encoded = np.empty(X.shape)
for i,item in enumerate(X[0]):
if item.isdigit():
X_encoded[:, i] = X[:, i]
else:
label_encoder.append(preprocessing.LabelEncoder())
X_encoded[:, i] = label_encoder[-1].fit_transform(X[:, i])
X = X_encoded[:, :-1].astype(int)
y = X_encoded[:, -1].astype(int)
# Build SVR
params = {'kernel': 'rbf', 'C': 10.0, 'epsilon': 0.2}
regressor = SVR(**params)
regressor.fit(X, y)
# Evaluate on the training data (resubstitution error; not a true cross-validation)
import sklearn.metrics as sm
y_pred = regressor.predict(X)
print("Mean absolute error =", round(sm.mean_absolute_error(y, y_pred), 2))
# Testing encoding on single data instance
input_data = ['Tuesday', '13:35', 'San Francisco', 'yes']
input_data_encoded = [-1] * len(input_data)
count = 0
for i,item in enumerate(input_data):
if item.isdigit():
input_data_encoded[i] = int(input_data[i])
else:
input_data_encoded[i] = int(label_encoder[count].transform([input_data[i]]))
count = count + 1
input_data_encoded = np.array(input_data_encoded)
# Predict and print output for a particular datapoint
print("Predicted traffic:", int(regressor.predict([input_data_encoded])[0]))
| 26.338983
| 84
| 0.675032
|
378c7fe7ad49032bbc94d23a12363c35294e744d
| 14,446
|
py
|
Python
|
sdk/python/pulumi_azure_native/web/latest/get_web_app_site_extension.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/web/latest/get_web_app_site_extension.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/web/latest/get_web_app_site_extension.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetWebAppSiteExtensionResult',
'AwaitableGetWebAppSiteExtensionResult',
'get_web_app_site_extension',
]
warnings.warn("""The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:web:getWebAppSiteExtension'.""", DeprecationWarning)
@pulumi.output_type
class GetWebAppSiteExtensionResult:
"""
Site Extension Information.
"""
def __init__(__self__, authors=None, comment=None, description=None, download_count=None, extension_id=None, extension_type=None, extension_url=None, feed_url=None, icon_url=None, id=None, installed_date_time=None, installer_command_line_params=None, kind=None, license_url=None, local_is_latest_version=None, local_path=None, name=None, project_url=None, provisioning_state=None, published_date_time=None, summary=None, system_data=None, title=None, type=None, version=None):
if authors and not isinstance(authors, list):
raise TypeError("Expected argument 'authors' to be a list")
pulumi.set(__self__, "authors", authors)
if comment and not isinstance(comment, str):
raise TypeError("Expected argument 'comment' to be a str")
pulumi.set(__self__, "comment", comment)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if download_count and not isinstance(download_count, int):
raise TypeError("Expected argument 'download_count' to be a int")
pulumi.set(__self__, "download_count", download_count)
if extension_id and not isinstance(extension_id, str):
raise TypeError("Expected argument 'extension_id' to be a str")
pulumi.set(__self__, "extension_id", extension_id)
if extension_type and not isinstance(extension_type, str):
raise TypeError("Expected argument 'extension_type' to be a str")
pulumi.set(__self__, "extension_type", extension_type)
if extension_url and not isinstance(extension_url, str):
raise TypeError("Expected argument 'extension_url' to be a str")
pulumi.set(__self__, "extension_url", extension_url)
if feed_url and not isinstance(feed_url, str):
raise TypeError("Expected argument 'feed_url' to be a str")
pulumi.set(__self__, "feed_url", feed_url)
if icon_url and not isinstance(icon_url, str):
raise TypeError("Expected argument 'icon_url' to be a str")
pulumi.set(__self__, "icon_url", icon_url)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if installed_date_time and not isinstance(installed_date_time, str):
raise TypeError("Expected argument 'installed_date_time' to be a str")
pulumi.set(__self__, "installed_date_time", installed_date_time)
if installer_command_line_params and not isinstance(installer_command_line_params, str):
raise TypeError("Expected argument 'installer_command_line_params' to be a str")
pulumi.set(__self__, "installer_command_line_params", installer_command_line_params)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if license_url and not isinstance(license_url, str):
raise TypeError("Expected argument 'license_url' to be a str")
pulumi.set(__self__, "license_url", license_url)
if local_is_latest_version and not isinstance(local_is_latest_version, bool):
raise TypeError("Expected argument 'local_is_latest_version' to be a bool")
pulumi.set(__self__, "local_is_latest_version", local_is_latest_version)
if local_path and not isinstance(local_path, str):
raise TypeError("Expected argument 'local_path' to be a str")
pulumi.set(__self__, "local_path", local_path)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if project_url and not isinstance(project_url, str):
raise TypeError("Expected argument 'project_url' to be a str")
pulumi.set(__self__, "project_url", project_url)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if published_date_time and not isinstance(published_date_time, str):
raise TypeError("Expected argument 'published_date_time' to be a str")
pulumi.set(__self__, "published_date_time", published_date_time)
if summary and not isinstance(summary, str):
raise TypeError("Expected argument 'summary' to be a str")
pulumi.set(__self__, "summary", summary)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if title and not isinstance(title, str):
raise TypeError("Expected argument 'title' to be a str")
pulumi.set(__self__, "title", title)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if version and not isinstance(version, str):
raise TypeError("Expected argument 'version' to be a str")
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def authors(self) -> Optional[Sequence[str]]:
"""
List of authors.
"""
return pulumi.get(self, "authors")
@property
@pulumi.getter
def comment(self) -> Optional[str]:
"""
Site Extension comment.
"""
return pulumi.get(self, "comment")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Detailed description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="downloadCount")
def download_count(self) -> Optional[int]:
"""
Count of downloads.
"""
return pulumi.get(self, "download_count")
@property
@pulumi.getter(name="extensionId")
def extension_id(self) -> Optional[str]:
"""
Site extension ID.
"""
return pulumi.get(self, "extension_id")
@property
@pulumi.getter(name="extensionType")
def extension_type(self) -> Optional[str]:
"""
Site extension type.
"""
return pulumi.get(self, "extension_type")
@property
@pulumi.getter(name="extensionUrl")
def extension_url(self) -> Optional[str]:
"""
Extension URL.
"""
return pulumi.get(self, "extension_url")
@property
@pulumi.getter(name="feedUrl")
def feed_url(self) -> Optional[str]:
"""
Feed URL.
"""
return pulumi.get(self, "feed_url")
@property
@pulumi.getter(name="iconUrl")
def icon_url(self) -> Optional[str]:
"""
Icon URL.
"""
return pulumi.get(self, "icon_url")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="installedDateTime")
def installed_date_time(self) -> Optional[str]:
"""
Installed timestamp.
"""
return pulumi.get(self, "installed_date_time")
@property
@pulumi.getter(name="installerCommandLineParams")
def installer_command_line_params(self) -> Optional[str]:
"""
Installer command line parameters.
"""
return pulumi.get(self, "installer_command_line_params")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="licenseUrl")
def license_url(self) -> Optional[str]:
"""
License URL.
"""
return pulumi.get(self, "license_url")
@property
@pulumi.getter(name="localIsLatestVersion")
def local_is_latest_version(self) -> Optional[bool]:
"""
<code>true</code> if the local version is the latest version; <code>false</code> otherwise.
"""
return pulumi.get(self, "local_is_latest_version")
@property
@pulumi.getter(name="localPath")
def local_path(self) -> Optional[str]:
"""
Local path.
"""
return pulumi.get(self, "local_path")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="projectUrl")
def project_url(self) -> Optional[str]:
"""
Project URL.
"""
return pulumi.get(self, "project_url")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Provisioning state.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publishedDateTime")
def published_date_time(self) -> Optional[str]:
"""
Published timestamp.
"""
return pulumi.get(self, "published_date_time")
@property
@pulumi.getter
def summary(self) -> Optional[str]:
"""
Summary description.
"""
return pulumi.get(self, "summary")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
The system metadata relating to this resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def title(self) -> Optional[str]:
return pulumi.get(self, "title")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def version(self) -> Optional[str]:
"""
Version information.
"""
return pulumi.get(self, "version")
class AwaitableGetWebAppSiteExtensionResult(GetWebAppSiteExtensionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetWebAppSiteExtensionResult(
authors=self.authors,
comment=self.comment,
description=self.description,
download_count=self.download_count,
extension_id=self.extension_id,
extension_type=self.extension_type,
extension_url=self.extension_url,
feed_url=self.feed_url,
icon_url=self.icon_url,
id=self.id,
installed_date_time=self.installed_date_time,
installer_command_line_params=self.installer_command_line_params,
kind=self.kind,
license_url=self.license_url,
local_is_latest_version=self.local_is_latest_version,
local_path=self.local_path,
name=self.name,
project_url=self.project_url,
provisioning_state=self.provisioning_state,
published_date_time=self.published_date_time,
summary=self.summary,
system_data=self.system_data,
title=self.title,
type=self.type,
version=self.version)
def get_web_app_site_extension(name: Optional[str] = None,
resource_group_name: Optional[str] = None,
site_extension_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWebAppSiteExtensionResult:
"""
Site Extension Information.
Latest API Version: 2020-10-01.
:param str name: Site name.
:param str resource_group_name: Name of the resource group to which the resource belongs.
:param str site_extension_id: Site extension name.
"""
pulumi.log.warn("""get_web_app_site_extension is deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:web:getWebAppSiteExtension'.""")
__args__ = dict()
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
__args__['siteExtensionId'] = site_extension_id
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:web/latest:getWebAppSiteExtension', __args__, opts=opts, typ=GetWebAppSiteExtensionResult).value
return AwaitableGetWebAppSiteExtensionResult(
authors=__ret__.authors,
comment=__ret__.comment,
description=__ret__.description,
download_count=__ret__.download_count,
extension_id=__ret__.extension_id,
extension_type=__ret__.extension_type,
extension_url=__ret__.extension_url,
feed_url=__ret__.feed_url,
icon_url=__ret__.icon_url,
id=__ret__.id,
installed_date_time=__ret__.installed_date_time,
installer_command_line_params=__ret__.installer_command_line_params,
kind=__ret__.kind,
license_url=__ret__.license_url,
local_is_latest_version=__ret__.local_is_latest_version,
local_path=__ret__.local_path,
name=__ret__.name,
project_url=__ret__.project_url,
provisioning_state=__ret__.provisioning_state,
published_date_time=__ret__.published_date_time,
summary=__ret__.summary,
system_data=__ret__.system_data,
title=__ret__.title,
type=__ret__.type,
version=__ret__.version)
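# Illustrative usage sketch only (the resource names below are placeholder assumptions and
# would normally appear inside a Pulumi program):
#
# ext = get_web_app_site_extension(name='my-web-app',
#                                  resource_group_name='my-resource-group',
#                                  site_extension_id='my-extension')
# pulumi.export('extensionUrl', ext.extension_url)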
| 37.718016
| 480
| 0.648484
|
a68aba9651c0a1440755a5b0b0aecaff783f8b6e
| 76
|
py
|
Python
|
src/application/services.py
|
ericlongxuan/intersection-server
|
b6284c85ca7e405f525901b4ac986b9941906971
|
[
"MIT",
"CC-BY-3.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
src/application/services.py
|
ericlongxuan/intersection-server
|
b6284c85ca7e405f525901b4ac986b9941906971
|
[
"MIT",
"CC-BY-3.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
src/application/services.py
|
ericlongxuan/intersection-server
|
b6284c85ca7e405f525901b4ac986b9941906971
|
[
"MIT",
"CC-BY-3.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
class Services(object):
def add_user(self, user_model):
return
| 15.2
| 35
| 0.657895
|
601fb3f7fff1ea63b4d5ac791b22dafe92a8eee3
| 9,206
|
py
|
Python
|
honssh/honeypot/docker_utils/docker_driver.py
|
tsarpaul/honssh
|
f2d6ad7fda81853a117d968379adbd7daa1ef57f
|
[
"BSD-3-Clause"
] | null | null | null |
honssh/honeypot/docker_utils/docker_driver.py
|
tsarpaul/honssh
|
f2d6ad7fda81853a117d968379adbd7daa1ef57f
|
[
"BSD-3-Clause"
] | null | null | null |
honssh/honeypot/docker_utils/docker_driver.py
|
tsarpaul/honssh
|
f2d6ad7fda81853a117d968379adbd7daa1ef57f
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2016 Thomas Nicholson <tnnich@googlemail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The names of the author(s) may not be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import os
import uuid
from honssh import log
from docker import Client
from dirsync import sync
from watchdog.observers import Observer
from .docker_filesystem import DockerFileSystemEventHandler
class DockerDriver(object):
def __init__(self, uri, image, launch_cmd, hostname, pids_limit, mem_limit, memswap_limit, shm_size, cpu_period,
cpu_shares, cpuset_cpus, peer_ip, reuse_container):
self.container_id = None
self.container_ip = None
self.connection = None
self.uri = uri
self.image = image
self.hostname = hostname
self.launch_cmd = launch_cmd
self.pids_limit = pids_limit
self.mem_limit = mem_limit
self.memswap_limit = memswap_limit
self.shm_size = shm_size
self.cpu_period = cpu_period
self.cpu_shares = cpu_shares
self.cpuset_cpus = cpuset_cpus
self.peer_ip = peer_ip
self.reuse_container = reuse_container
self.watcher = None
self.syncing = False
self.overlay_folder = None
self.mount_dir = None
self.max_filesize = 0
self.use_revisions = False
self.make_connection()
def make_connection(self):
self.connection = Client(self.uri)
def try_reuse_alive_container(self, old_container_id):
# Check for existing, active container
containers_alive = self.connection.containers()
old_container = [c for c in containers_alive if c['Id'] == old_container_id]
if old_container:
old_container = old_container[0]
container_id = old_container_id
container_ip = old_container['NetworkSettings']['Networks']['bridge']['IPAddress']
log.msg(log.LGREEN, '[PLUGIN][DOCKER]', 'Reusing ACTIVE container %s ' % old_container_id)
return container_id, container_ip
def launch_container(self):
old_container_id = None
container_name = self.peer_ip
if not self.reuse_container:
container_name = container_name + "-" + str(uuid.uuid4())
if self.reuse_container:
try:
# Get container id
container_data = self.connection.inspect_container(self.peer_ip)
old_container_id = container_data['Id']
# Will fail if container isn't alive:
self.container_id, self.container_ip = self.try_reuse_alive_container(old_container_id)
return {"id": self.container_id, "ip": self.container_ip}
except Exception:
pass
if old_container_id:
self.container_id = old_container_id
log.msg(log.LGREEN, '[PLUGIN][DOCKER]', 'Reusing container %s ' % self.container_id)
self.connection.restart(self.container_id)
if not self.container_id:
host_config = self.connection.create_host_config(pids_limit=self.pids_limit, mem_limit=self.mem_limit,
memswap_limit=self.memswap_limit, shm_size=self.shm_size,
cpu_period=self.cpu_period, cpu_shares=self.cpu_shares,
cpuset_cpus=self.cpuset_cpus)
self.container_id = \
self.connection.create_container(image=self.image, hostname=self.hostname,
name=container_name, host_config=host_config)['Id']
self.connection.start(self.container_id)
exec_id = self.connection.exec_create(self.container_id, self.launch_cmd)['Id']
self.connection.exec_start(exec_id)
container_data = self.connection.inspect_container(self.container_id)
self.container_ip = container_data['NetworkSettings']['Networks']['bridge']['IPAddress']
log.msg(log.LCYAN, '[PLUGIN][DOCKER]',
'Launched container (%s, %s)' % (self.container_ip, self.container_id))
return {"id": self.container_id, "ip": self.container_ip}
def teardown_container(self, destroy_container):
print("DESTROYING CONTAINER WITH ID: " + str(self.container_id))
if self.watcher is not None:
self.watcher.unschedule_all()
log.msg(log.LCYAN, '[PLUGIN][DOCKER]', 'Filesystem watcher stopped')
if self.syncing:
sync(self.mount_dir, self.overlay_folder, action='sync')
self.connection.stop(self.container_id)
log.msg(log.LCYAN, '[PLUGIN][DOCKER]',
'Stopped container (%s, %s)' % (self.container_ip, self.container_id))
# Check for container reuse
if not self.reuse_container or destroy_container:
self.connection.remove_container(self.container_id, force=True)
log.msg(log.LCYAN, '[PLUGIN][DOCKER]',
'Destroyed container (%s, %s)' % (self.container_ip, self.container_id))
def _file_get_contents(self, filename):
with open(filename) as f:
return f.read()
def start_watcher(self, dest_path, max_filesize, use_revisions):
if self.watcher is None:
self.overlay_folder = dest_path
self.max_filesize = max_filesize
self.use_revisions = use_revisions
# Check if watching should be started
if len(self.overlay_folder) > 0:
# Create overlay folder if needed
if not os.path.exists(self.overlay_folder):
os.makedirs(self.overlay_folder)
                os.chmod(self.overlay_folder, 0o755)  # 0o755: octal literal valid in both Python 2.6+ and Python 3
self._start_inotify()
def _start_inotify(self):
docker_info = self.connection.info()
docker_root = docker_info['DockerRootDir']
storage_driver = docker_info['Driver']
supported_storage = {
'aufs': '%s/%s/mnt/%s', # -> /var/lib/docker/aufs/mnt/<mount-id>
'btrfs': '%s/%s/subvolumes/%s', # -> /var/lib/docker/btrfs/subvolumes/<mount-id>
'overlay': '%s/%s/%s/diff', # -> /var/lib/docker/overlay/<mount-id>/diff
'overlay2': '%s/%s/%s/diff' # -> /var/lib/docker/overlay2/<mount-id>/diff
}
if storage_driver in supported_storage:
# Get container mount id
mount_id = self._file_get_contents(
('%s/image/%s/layerdb/mounts/%s/mount-id' % (docker_root, storage_driver, self.container_id)))
# construct mount path
self.mount_dir = supported_storage[storage_driver] % (docker_root, storage_driver, mount_id)
log.msg(log.LGREEN, '[PLUGIN][DOCKER]', 'Starting filesystem watcher at %s' % self.mount_dir)
try:
# Create watcher and start watching
# self.watcher = Observer()
# event_handler = DockerFileSystemEventHandler(self.overlay_folder, self.mount_dir,
# self.max_filesize, self.use_revisions)
# self.watcher.schedule(event_handler, self.mount_dir, recursive=True)
# self.watcher.start()
self.syncing = True
sync(self.mount_dir, self.overlay_folder, action='sync', create=True, force=True)
log.msg(log.LGREEN, '[PLUGIN][DOCKER]', 'Filesystem watcher started')
except Exception as exc:
log.msg(log.LRED, '[PLUGIN][DOCKER]', 'Failed to start filesystem watcher "%s"' % str(exc))
else:
log.msg(log.LRED, '[PLUGIN][DOCKER]',
'Filesystem watcher not supported for storage driver "%s"' % storage_driver)
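# Illustrative usage sketch only (all values below are placeholder assumptions, not honssh
# defaults): create a driver for a connecting peer, launch its container, then tear it down.
#
# driver = DockerDriver(uri='unix://var/run/docker.sock', image='busybox:latest',
#                       launch_cmd='/bin/sh', hostname='honeypot', pids_limit=100,
#                       mem_limit='256m', memswap_limit='256m', shm_size='64m',
#                       cpu_period=100000, cpu_shares=512, cpuset_cpus='0',
#                       peer_ip='192.0.2.10', reuse_container=False)
# details = driver.launch_container()   # returns {"id": ..., "ip": ...}
# driver.teardown_container(destroy_container=True)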
| 45.800995
| 118
| 0.635564
|
3d43a309f82b814ef6fd49c71f81cc9e6012ebcb
| 13,892
|
py
|
Python
|
benchmark_helper.py
|
raikonenfnu/transformer-benchmarks
|
7edcae20c319262620127b2cb83c0d19ef7dea0f
|
[
"BSD-3-Clause"
] | 14
|
2021-11-08T15:09:29.000Z
|
2022-03-19T22:35:43.000Z
|
benchmark_helper.py
|
raikonenfnu/transformer-benchmarks
|
7edcae20c319262620127b2cb83c0d19ef7dea0f
|
[
"BSD-3-Clause"
] | 19
|
2021-08-21T08:43:10.000Z
|
2022-03-18T21:52:45.000Z
|
benchmark_helper.py
|
raikonenfnu/transformer-benchmarks
|
7edcae20c319262620127b2cb83c0d19ef7dea0f
|
[
"BSD-3-Clause"
] | 3
|
2019-01-08T12:19:04.000Z
|
2020-05-09T21:33:12.000Z
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import os
import sys
import csv
import numpy
import time
import timeit
from datetime import datetime
import argparse
import logging
import coloredlogs
import torch
import onnx
from enum import Enum
from packaging import version
logger = logging.getLogger(__name__)
class Precision(Enum):
FLOAT32 = 'fp32'
FLOAT16 = 'fp16'
INT8 = 'int8'
def __str__(self):
return self.value
IO_BINDING_DATA_TYPE_MAP = {
"float32": numpy.float32,
# TODO: Add more.
}
def create_onnxruntime_session(onnx_model_path,
use_gpu,
enable_all_optimization=True,
num_threads=-1,
enable_profiling=False,
verbose=False):
session = None
try:
from onnxruntime import SessionOptions, InferenceSession, GraphOptimizationLevel, __version__ as onnxruntime_version
sess_options = SessionOptions()
if enable_all_optimization:
sess_options.graph_optimization_level = GraphOptimizationLevel.ORT_ENABLE_ALL
else:
sess_options.graph_optimization_level = GraphOptimizationLevel.ORT_ENABLE_BASIC
if enable_profiling:
sess_options.enable_profiling = True
if num_threads > 0:
sess_options.intra_op_num_threads = num_threads
logger.debug(f"Session option: intra_op_num_threads={sess_options.intra_op_num_threads}")
if verbose:
sess_options.log_severity_level = 0
else:
sess_options.log_severity_level = 4
logger.debug(f"Create session for onnx model: {onnx_model_path}")
execution_providers = ['CPUExecutionProvider'
] if not use_gpu else ['CUDAExecutionProvider', 'CPUExecutionProvider']
session = InferenceSession(onnx_model_path, sess_options, providers=execution_providers)
except:
logger.error(f"Exception", exc_info=True)
return session
def setup_logger(verbose=True):
if verbose:
coloredlogs.install(level='DEBUG', fmt='[%(filename)s:%(lineno)s - %(funcName)20s()] %(message)s')
else:
coloredlogs.install(fmt='%(message)s')
logging.getLogger("transformers").setLevel(logging.WARNING)
def prepare_environment(cache_dir, output_dir, use_gpu):
if cache_dir and not os.path.exists(cache_dir):
os.makedirs(cache_dir)
if output_dir and not os.path.exists(output_dir):
os.makedirs(output_dir)
import onnxruntime
if use_gpu:
assert 'CUDAExecutionProvider' in onnxruntime.get_available_providers(
), "Please install onnxruntime-gpu package to test GPU inference."
import transformers
logger.info(f'PyTorch Version:{torch.__version__}')
logger.info(f'Transformers Version:{transformers.__version__}')
logger.info(f'Onnxruntime Version:{onnxruntime.__version__}')
# Support three major versions of PyTorch and OnnxRuntime, and up to 6 months of transformers.
from packaging import version
assert version.parse(torch.__version__) >= version.parse('1.5.0')
assert version.parse(transformers.__version__) >= version.parse('3.0.0')
assert version.parse(onnxruntime.__version__) >= version.parse('1.4.0')
def get_latency_result(runtimes, batch_size):
latency_ms = sum(runtimes) / float(len(runtimes)) * 1000.0
latency_variance = numpy.var(runtimes, dtype=numpy.float64) * 1000.0
throughput = batch_size * (1000.0 / latency_ms)
return {
"test_times": len(runtimes),
"latency_variance": "{:.2f}".format(latency_variance),
"latency_90_percentile": "{:.2f}".format(numpy.percentile(runtimes, 90) * 1000.0),
"latency_95_percentile": "{:.2f}".format(numpy.percentile(runtimes, 95) * 1000.0),
"latency_99_percentile": "{:.2f}".format(numpy.percentile(runtimes, 99) * 1000.0),
"average_latency_ms": "{:.2f}".format(latency_ms),
"QPS": "{:.2f}".format(throughput),
}
def output_details(results, csv_filename):
with open(csv_filename, mode="a", newline='') as csv_file:
column_names = [
"engine", "version", "device", "precision", "optimizer", "io_binding", "model_name", "inputs", "threads",
"batch_size", "sequence_length", "datetime", "test_times", "QPS", "average_latency_ms", "latency_variance",
"latency_90_percentile", "latency_95_percentile", "latency_99_percentile"
]
csv_writer = csv.DictWriter(csv_file, fieldnames=column_names)
csv_writer.writeheader()
for result in results:
csv_writer.writerow(result)
logger.info(f"Detail results are saved to csv file: {csv_filename}")
def output_summary(results, csv_filename, args):
with open(csv_filename, mode="a", newline='') as csv_file:
header_names = [
"model_name", "inputs", "engine", "version", "device", "precision", "optimizer", "io_binding", "threads"
]
data_names = []
for batch_size in args.batch_sizes:
for sequence_length in args.sequence_lengths:
data_names.append(f"b{batch_size}_s{sequence_length}")
csv_writer = csv.DictWriter(csv_file, fieldnames=header_names + data_names)
csv_writer.writeheader()
for model_name in args.models:
for input_count in [1, 2, 3]:
for engine_name in args.engines:
for io_binding in [True, False, ""]:
for threads in args.num_threads:
row = {}
for result in results:
if result["model_name"] == model_name and result["inputs"] == input_count and result[
"engine"] == engine_name and result["io_binding"] == io_binding and result[
"threads"] == threads:
headers = {k: v for k, v in result.items() if k in header_names}
if not row:
row.update(headers)
row.update({k: "" for k in data_names})
else:
for k in header_names:
assert row[k] == headers[k]
b = result["batch_size"]
s = result["sequence_length"]
row[f"b{b}_s{s}"] = result["average_latency_ms"]
if row:
csv_writer.writerow(row)
logger.info(f"Summary results are saved to csv file: {csv_filename}")
def output_fusion_statistics(model_fusion_statistics, csv_filename):
from transformers import __version__ as transformers_version
with open(csv_filename, mode="a", newline='') as csv_file:
column_names = ["model_filename", "datetime", "transformers", "torch"] + list(
next(iter(model_fusion_statistics.values())).keys())
csv_writer = csv.DictWriter(csv_file, fieldnames=column_names)
csv_writer.writeheader()
for key in model_fusion_statistics.keys():
model_fusion_statistics[key]["datetime"] = str(datetime.now())
model_fusion_statistics[key]["transformers"] = transformers_version
model_fusion_statistics[key]["torch"] = torch.__version__
model_fusion_statistics[key]["model_filename"] = key
csv_writer.writerow(model_fusion_statistics[key])
logger.info(f"Fusion statistics is saved to csv file: {csv_filename}")
def inference_ort(ort_session, ort_inputs, result_template, repeat_times, batch_size):
result = {}
runtimes = timeit.repeat(lambda: ort_session.run(None, ort_inputs), number=1, repeat=repeat_times)
result.update(result_template)
result.update({"io_binding": False})
result.update(get_latency_result(runtimes, batch_size))
return result
def inference_ort_with_io_binding(ort_session,
ort_inputs,
result_template,
repeat_times,
ort_output_names,
ort_outputs,
output_buffers,
output_buffer_max_sizes,
batch_size,
device,
data_type=numpy.longlong):
result = {}
# Bind inputs and outputs to onnxruntime session
io_binding = ort_session.io_binding()
# Bind inputs to device
for name in ort_inputs.keys():
np_input = torch.from_numpy(ort_inputs[name]).to(device)
input_type = IO_BINDING_DATA_TYPE_MAP[str(ort_inputs[name].dtype)] if str(
ort_inputs[name].dtype) in IO_BINDING_DATA_TYPE_MAP else data_type
io_binding.bind_input(name, np_input.device.type, 0, input_type, np_input.shape, np_input.data_ptr())
# Bind outputs buffers with the sizes needed if not allocated already
if len(output_buffers) == 0:
allocateOutputBuffers(output_buffers, output_buffer_max_sizes, device)
for i in range(len(ort_output_names)):
io_binding.bind_output(ort_output_names[i], output_buffers[i].device.type, 0, numpy.float32,
ort_outputs[i].shape, output_buffers[i].data_ptr())
runtimes = timeit.repeat(lambda: ort_session.run_with_iobinding(io_binding), number=1, repeat=repeat_times)
result.update(result_template)
result.update({"io_binding": True})
result.update(get_latency_result(runtimes, batch_size))
return result
def allocateOutputBuffers(output_buffers, output_buffer_max_sizes, device):
# Allocate output tensors with the largest test size needed. So the allocated memory can be reused
# for each test run.
for i in output_buffer_max_sizes:
output_buffers.append(torch.empty(i, dtype=torch.float32, device=device))
def set_random_seed(seed=123):
"""Set random seed manully to get deterministic results"""
import random
random.seed(seed)
numpy.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
#torch.backends.cudnn.enabled = False
#torch.backends.cudnn.benchmark = False
#torch.backends.cudnn.deterministic = True
def measure_memory(is_gpu, func):
import os
import psutil
from time import sleep
class MemoryMonitor:
def __init__(self, keep_measuring=True):
self.keep_measuring = keep_measuring
def measure_cpu_usage(self):
max_usage = 0
while True:
max_usage = max(max_usage, psutil.Process(os.getpid()).memory_info().rss / 1024**2)
sleep(0.005) # 5ms
if not self.keep_measuring:
break
return max_usage
def measure_gpu_usage(self):
from py3nvml.py3nvml import nvmlInit, nvmlDeviceGetCount, nvmlDeviceGetHandleByIndex, \
nvmlDeviceGetMemoryInfo, nvmlDeviceGetName, nvmlShutdown, NVMLError
max_gpu_usage = []
gpu_name = []
try:
nvmlInit()
deviceCount = nvmlDeviceGetCount()
max_gpu_usage = [0 for i in range(deviceCount)]
gpu_name = [nvmlDeviceGetName(nvmlDeviceGetHandleByIndex(i)) for i in range(deviceCount)]
while True:
for i in range(deviceCount):
info = nvmlDeviceGetMemoryInfo(nvmlDeviceGetHandleByIndex(i))
max_gpu_usage[i] = max(max_gpu_usage[i], info.used / 1024**2)
sleep(0.005) # 5ms
if not self.keep_measuring:
break
nvmlShutdown()
return [{
"device_id": i,
"name": gpu_name[i],
"max_used_MB": max_gpu_usage[i]
} for i in range(deviceCount)]
except NVMLError as error:
                # MemoryMonitor defines no `silent` or `logger` attributes; use the module-level logger.
                logger.error("Error fetching GPU information using nvml: %s", error)
return None
monitor = MemoryMonitor(False)
memory_before_test = monitor.measure_gpu_usage() if is_gpu else monitor.measure_cpu_usage()
from concurrent.futures import ThreadPoolExecutor
with ThreadPoolExecutor() as executor:
monitor = MemoryMonitor()
mem_thread = executor.submit(monitor.measure_gpu_usage if is_gpu else monitor.measure_cpu_usage)
try:
fn_thread = executor.submit(func)
result = fn_thread.result()
finally:
monitor.keep_measuring = False
max_usage = mem_thread.result()
if is_gpu:
print(f"GPU memory usage: before={memory_before_test} peak={max_usage}")
if len(memory_before_test) >= 1 and len(max_usage) >= 1:
before = memory_before_test[0]["max_used_MB"]
after = max_usage[0]["max_used_MB"]
return after - before
else:
return None
else:
print(f"CPU memory usage: before={memory_before_test:.1f} MB, peak={max_usage:.1f} MB")
return max_usage - memory_before_test
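# Illustrative usage sketch only (not part of the original module): measure the peak
# additional CPU memory of an arbitrary callable; the workload below is a made-up example.
if __name__ == "__main__":
    peak_mb = measure_memory(is_gpu=False, func=lambda: [bytearray(1024) for _ in range(100000)])
    print(f"Peak additional CPU memory: {peak_mb:.1f} MB")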
| 41.717718
| 124
| 0.607976
|
f4126de6d79a27064608aec94e13eee944cd9edb
| 1,585
|
py
|
Python
|
src/benchmarkstt/cli/main.py
|
ebu/benchmarkstt
|
3235d65661a4b771403e0369fac02c05e58c8974
|
[
"MIT"
] | 30
|
2019-04-26T14:57:47.000Z
|
2022-01-31T13:59:12.000Z
|
src/benchmarkstt/cli/main.py
|
ebu/benchmarkstt
|
3235d65661a4b771403e0369fac02c05e58c8974
|
[
"MIT"
] | 86
|
2019-04-24T13:42:48.000Z
|
2022-02-22T08:57:10.000Z
|
src/benchmarkstt/cli/main.py
|
ebu/ai-benchmarking
|
3235d65661a4b771403e0369fac02c05e58c8974
|
[
"MIT"
] | 7
|
2019-06-28T13:08:50.000Z
|
2022-02-09T12:37:27.000Z
|
import logging
import sys
from contextlib import contextmanager
from benchmarkstt import __meta__
from benchmarkstt.cli import create_parser, args_help, args_common, before_parseargs, args_complete
@contextmanager
def parser_context():
try:
# import done here to avoid circular references
import benchmarkstt.cli.entrypoints.benchmark as benchmark_cli
name = 'benchmarkstt'
desc = 'BenchmarkSTT\'s main command line tool that is used for benchmarking speech-to-text, ' \
'for additional tools, see ``benchmarkstt-tools --help``.'
argparser = create_parser(prog=name, description=desc)
benchmark_cli.argparser(argparser)
argparser.add_argument('--version', action='store_true',
help='Output %s version number' % (name,))
args_common(argparser)
args_help(argparser)
yield argparser
finally:
pass
def argparser():
with parser_context() as parser:
return parser
def run():
before_parseargs()
# import done here to avoid circular dependencies
import benchmarkstt.cli.entrypoints.benchmark as entrypoint
with parser_context() as parser:
args_complete(parser)
if '--version' in sys.argv:
print("benchmarkstt: %s" % (__meta__.__version__,))
logging.getLogger().info('python version: %s', sys.version)
parser.exit(0)
args = parser.parse_args()
entrypoint.run(parser, args)
exit(0)
if __name__ == '__main__': # pragma: nocover
run()
| 28.303571
| 104
| 0.660568
|
2babd8c251bb2f286037eee86de1b8204199118c
| 1,077
|
py
|
Python
|
2016/aoc2016_8a.py
|
ByteCommander/AdventOfCode
|
daf8ffd88892e997e0cc763a29eaf2122585c4f9
|
[
"MIT"
] | 2
|
2017-12-03T23:56:52.000Z
|
2017-12-04T09:49:48.000Z
|
2016/aoc2016_8a.py
|
ByteCommander/AdventOfCode
|
daf8ffd88892e997e0cc763a29eaf2122585c4f9
|
[
"MIT"
] | null | null | null |
2016/aoc2016_8a.py
|
ByteCommander/AdventOfCode
|
daf8ffd88892e997e0cc763a29eaf2122585c4f9
|
[
"MIT"
] | null | null | null |
# Advent Of Code 2016, day 8, part 1
# http://adventofcode.com/2016/day/8
# solution by ByteCommander, 2017-12-02
data = open("inputs/aoc2016_8.txt").read()
board = [[0 for x in range(50)] for y in range(6)]
def rot_row(row, by):
global board
board[row] = board[row][-by:] + board[row][:-by]
def rot_col(col, by):
global board
board = list(map(list, zip(*board)))
rot_row(col, by)
board = list(map(list, zip(*board)))
for line in data.splitlines():
# print("\n" + line)
cmd, *args = line.split()
if cmd == "rect":
a, b = map(int, args[0].split("x"))
for x in range(a):
for y in range(b):
board[y][x] = 1
elif cmd == "rotate":
direction, _a, _, b = args
a, b = int(_a.split("=")[-1]), int(b)
if direction == "row":
rot_row(a, b)
else:
rot_col(a, b)
pass
# print(*["|" + "".join("#" if c else " " for c in row) + "|"
# for row in board], sep="\n")
print("Answer: {} LEDs lit".format(sum(map(sum, board))))
| 23.413043
| 65
| 0.519963
|
17628ffdda30e5edb1e26fea9ec9f2ed9059bacf
| 2,295
|
py
|
Python
|
desktop/core/ext-py/Django-1.11.20/tests/gis_tests/layermap/models.py
|
maulikjs/hue
|
59ac879b55bb6fb26ecb4e85f4c70836fc21173f
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
tests/gis_tests/layermap/models.py
|
287977288/test
|
142e3626ab3c676574631383ae6b5a4eced5a10e
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
tests/gis_tests/layermap/models.py
|
287977288/test
|
142e3626ab3c676574631383ae6b5a4eced5a10e
|
[
"PSF-2.0",
"BSD-3-Clause"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class NamedModel(models.Model):
name = models.CharField(max_length=25)
class Meta:
abstract = True
def __str__(self):
return self.name
class State(NamedModel):
pass
class County(NamedModel):
state = models.ForeignKey(State, models.CASCADE)
mpoly = models.MultiPolygonField(srid=4269) # Multipolygon in NAD83
class CountyFeat(NamedModel):
poly = models.PolygonField(srid=4269)
class City(NamedModel):
name_txt = models.TextField(default='')
name_short = models.CharField(max_length=5)
population = models.IntegerField()
density = models.DecimalField(max_digits=7, decimal_places=1)
dt = models.DateField()
point = models.PointField()
class Meta:
app_label = 'layermap'
class Interstate(NamedModel):
length = models.DecimalField(max_digits=6, decimal_places=2)
path = models.LineStringField()
class Meta:
app_label = 'layermap'
# Same as `City` above, but for testing model inheritance.
class CityBase(NamedModel):
population = models.IntegerField()
density = models.DecimalField(max_digits=7, decimal_places=1)
point = models.PointField()
class ICity1(CityBase):
dt = models.DateField()
class Meta(CityBase.Meta):
pass
class ICity2(ICity1):
dt_time = models.DateTimeField(auto_now=True)
class Meta(ICity1.Meta):
pass
class Invalid(models.Model):
point = models.PointField()
# Mapping dictionaries for the models above.
co_mapping = {
'name': 'Name',
# ForeignKey's use another mapping dictionary for the _related_ Model (State in this case).
'state': {'name': 'State'},
'mpoly': 'MULTIPOLYGON', # Will convert POLYGON features into MULTIPOLYGONS.
}
cofeat_mapping = {'name': 'Name',
'poly': 'POLYGON',
}
city_mapping = {'name': 'Name',
'population': 'Population',
'density': 'Density',
'dt': 'Created',
'point': 'POINT',
}
inter_mapping = {'name': 'Name',
'length': 'Length',
'path': 'LINESTRING',
}
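# Illustrative sketch only (not part of the test fixtures): these mapping dictionaries are
# what GeoDjango's LayerMapping utility consumes. The shapefile path below is a placeholder.
#
# from django.contrib.gis.utils import LayerMapping
# lm = LayerMapping(City, '/path/to/cities.shp', city_mapping, transform=False)
# lm.save(strict=True, verbose=True)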
| 23.659794
| 95
| 0.642702
|
2a06d6172fd2526cf03417f52712acceb553093a
| 610
|
py
|
Python
|
sdk/python/tests/compiler/testdata/testpackage/mypipeline/__init__.py
|
hwk42/pipelines
|
c89ed71cf6339cdcdd957d4dca4b1f32c10db9c9
|
[
"Apache-2.0"
] | 1
|
2021-08-23T19:09:56.000Z
|
2021-08-23T19:09:56.000Z
|
sdk/python/tests/compiler/testdata/testpackage/mypipeline/__init__.py
|
hwk42/pipelines
|
c89ed71cf6339cdcdd957d4dca4b1f32c10db9c9
|
[
"Apache-2.0"
] | 2
|
2021-06-01T10:02:51.000Z
|
2021-06-07T07:19:14.000Z
|
sdk/python/tests/compiler/testdata/testpackage/mypipeline/__init__.py
|
hwk42/pipelines
|
c89ed71cf6339cdcdd957d4dca4b1f32c10db9c9
|
[
"Apache-2.0"
] | 3
|
2022-01-10T13:40:24.000Z
|
2022-03-21T08:46:14.000Z
|
# Copyright 2018 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .compose import *
| 35.882353
| 74
| 0.760656
|
cbac45dbdff3c4b6c1e61327bcbd8fa246b330df
| 1,105
|
py
|
Python
|
card_reader.py
|
JustinPead/card_authenticator
|
0ad8a0a05a5fccf1e392a2e1caed17ccf2ca9e59
|
[
"MIT"
] | null | null | null |
card_reader.py
|
JustinPead/card_authenticator
|
0ad8a0a05a5fccf1e392a2e1caed17ccf2ca9e59
|
[
"MIT"
] | null | null | null |
card_reader.py
|
JustinPead/card_authenticator
|
0ad8a0a05a5fccf1e392a2e1caed17ccf2ca9e59
|
[
"MIT"
] | null | null | null |
import serial
import logging
import time
class CardReader:
def __init__(self,logger=logging.getLogger(__name__)):
self.logger = logger
#Open COM port
port = "/dev/ttyACM0" #hardcoded for linux
self.ser = serial.Serial(port,baudrate=9600,parity=serial.PARITY_ODD,stopbits=serial.STOPBITS_TWO,bytesize=serial.SEVENBITS)
self.logger.info("{p} port established".format(p = port))
def get_tag_id(self):
tag_length = 14
self.ser.read(self.ser.inWaiting()) #flushing the system.
time.sleep(0.1)
while(self.ser.inWaiting()>0):
self.ser.read(self.ser.inWaiting()) #flushing the system.
self.logger.debug("Data still coming in - Flushing Loop")
time.sleep(0.1)
self.logger.debug("Waiting for Data")
while(self.ser.inWaiting()<tag_length):
pass
value = self.ser.read(tag_length)
value = value.decode("utf-8")
value = int(value[1:-3],16)
self.logger.debug("Value: {v}".format(v = value))
return value
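# Illustrative usage sketch (an addition, not in the original file; assumes a reader is
# attached on /dev/ttyACM0 and the module is executed directly):
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    reader = CardReader()
    print("Scanned tag id:", reader.get_tag_id())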
| 38.103448
| 133
| 0.611765
|
f6b9209c0d1653269b5ff4bd13a0951895f938bc
| 3,541
|
py
|
Python
|
tests/text_test.py
|
mkorman9/ai_slovenian_press
|
8087b9379e0cd3464d5aa21f6bf1bbff19a69fdf
|
[
"MIT"
] | null | null | null |
tests/text_test.py
|
mkorman9/ai_slovenian_press
|
8087b9379e0cd3464d5aa21f6bf1bbff19a69fdf
|
[
"MIT"
] | null | null | null |
tests/text_test.py
|
mkorman9/ai_slovenian_press
|
8087b9379e0cd3464d5aa21f6bf1bbff19a69fdf
|
[
"MIT"
] | null | null | null |
import slovenian_press.text
import unittest
import mock
from assertpy import assert_that
class TextTest(unittest.TestCase):
def test_text_processing_chain_should_call_all_processors(self):
# given
processor1 = mock.MagicMock(spec=slovenian_press.text.TextProcessor)
processor2 = mock.MagicMock(spec=slovenian_press.text.TextProcessor)
processing_chain = slovenian_press.text.TextProcessingChain()
processing_chain.register(processor1)
processing_chain.register(processor2)
# when
processing_chain.process('input_text')
# then
processor1.process.assert_called_once()
processor2.process.assert_called_once()
class TestArticlesProvider(unittest.TestCase):
def test_articles_should_be_retrieved_from_empty_datasource(self):
self._test_articles_provider([], [], [], [])
def test_articles_should_be_retrieved_from_datasource_with_single_record(self):
self._test_articles_provider([{'specialCoverage': [123],
'text': [u'xyzw'],
'id': ['456'],
'headline': u'l',
'keywords': [u'a', u'b']}],
expected_ids=['456'],
expected_target_names=['123'],
expected_data=['xyzw'])
def test_articles_should_be_retrieved_from_datasource_with_multiple_records(self):
self._test_articles_provider([{'specialCoverage': [123],
'text': [u'xyz'],
'id': ['456'],
'headline': u'l',
'keywords': [u'a', u'b']},
{'specialCoverage': [666],
'text': [u'zyxz'],
'id': ['567'],
'headline': u'l',
'keywords': [u'abcd', u'bbcd']}],
expected_ids=['456', '567'],
expected_target_names=['123', '666'],
expected_data=['', 'zyxz abcd bbcd'])
def test_articles_should_be_retrieved_from_datasource_with_record_with_no_text_field(self):
self._test_articles_provider([{'specialCoverage': [123],
'headline': [u'xyz'],
'id': ['456'],
'keywords': [u'a', u'b']}],
expected_ids=['456'],
expected_target_names=['123'],
expected_data=[''])
def _test_articles_provider(self, input, expected_ids, expected_target_names, expected_data):
# given
datasource_mock = mock.MagicMock(spec=slovenian_press.configuration.AbstractDatasourceReader)
datasource_mock.read_json.return_value = input
articles_provider = slovenian_press.text.ArticlesProvider(datasource_mock)
# when
result = articles_provider.provide()
# then
assert_that(result.id).is_equal_to(expected_ids)
assert_that(result.target_names).is_equal_to(expected_target_names)
assert_that([data.strip() for data in result.data]).is_equal_to(expected_data)
| 45.987013
| 101
| 0.521886
|
ffa3809694b1b0373a6010eb43d6ee390e88bdc8
| 9,253
|
py
|
Python
|
model/FPENet.py
|
ZAKAUDD/Segmentation-Networks
|
0f0c32e7af3463d381cb184a158ff60e16f7fb9a
|
[
"MIT"
] | 743
|
2019-10-07T08:32:31.000Z
|
2022-03-31T12:06:23.000Z
|
model/FPENet.py
|
Super-Iron-Man/Efficient-Segmentation-Networks
|
7e006809a7345819ebc50326175df156beeca618
|
[
"MIT"
] | 23
|
2019-12-02T12:35:23.000Z
|
2021-11-29T17:28:59.000Z
|
model/FPENet.py
|
Super-Iron-Man/Efficient-Segmentation-Networks
|
7e006809a7345819ebc50326175df156beeca618
|
[
"MIT"
] | 140
|
2019-10-09T01:02:51.000Z
|
2022-03-08T01:46:14.000Z
|
###################################################################################################
#FPENet:Feature Pyramid Encoding Network for Real-time Semantic Segmentation
#Paper-Link: https://arxiv.org/pdf/1909.08599v1.pdf
###################################################################################################
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchsummary import summary
__all__ = ["FPENet"]
def conv3x3(in_planes, out_planes, stride=1, padding=1, dilation=1, groups=1, bias=False):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=padding, dilation=dilation, groups=groups,bias=bias)
def conv1x1(in_planes, out_planes, stride=1, bias=False):
"""1x1 convolution"""
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=bias)
class SEModule(nn.Module):
def __init__(self, channels, reduction=16):
super(SEModule, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1, padding=0)
self.relu = nn.ReLU(inplace=True)
self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1, padding=0)
self.sigmoid = nn.Sigmoid()
def forward(self, input):
x = self.avg_pool(input)
x = self.fc1(x)
x = self.relu(x)
x = self.fc2(x)
x = self.sigmoid(x)
return input * x
class FPEBlock(nn.Module):
def __init__(self, inplanes, outplanes, dilat, downsample=None, stride=1, t=1, scales=4, se=False, norm_layer=None):
super(FPEBlock, self).__init__()
if inplanes % scales != 0:
raise ValueError('Planes must be divisible by scales')
if norm_layer is None:
norm_layer = nn.BatchNorm2d
bottleneck_planes = inplanes * t
self.conv1 = conv1x1(inplanes, bottleneck_planes, stride)
self.bn1 = norm_layer(bottleneck_planes)
self.conv2 = nn.ModuleList([conv3x3(bottleneck_planes // scales, bottleneck_planes // scales,
groups=(bottleneck_planes // scales),dilation=dilat[i],
padding=1*dilat[i]) for i in range(scales)])
self.bn2 = nn.ModuleList([norm_layer(bottleneck_planes // scales) for _ in range(scales)])
self.conv3 = conv1x1(bottleneck_planes, outplanes)
self.bn3 = norm_layer(outplanes)
self.relu = nn.ReLU(inplace=True)
self.se = SEModule(outplanes) if se else None
self.downsample = downsample
self.stride = stride
self.scales = scales
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
xs = torch.chunk(out, self.scales, 1)
ys = []
for s in range(self.scales):
if s == 0:
ys.append(self.relu(self.bn2[s](self.conv2[s](xs[s]))))
else:
ys.append(self.relu(self.bn2[s](self.conv2[s](xs[s] + ys[-1]))))
out = torch.cat(ys, 1)
out = self.conv3(out)
out = self.bn3(out)
if self.se is not None:
out = self.se(out)
if self.downsample is not None:
identity = self.downsample(identity)
out += identity
out = self.relu(out)
return out
class MEUModule(nn.Module):
def __init__(self, channels_high, channels_low, channel_out):
super(MEUModule, self).__init__()
self.conv1x1_low = nn.Conv2d(channels_low, channel_out, kernel_size=1, bias=False)
self.bn_low = nn.BatchNorm2d(channel_out)
self.sa_conv = nn.Conv2d(1, 1, kernel_size=1, bias=False)
self.conv1x1_high = nn.Conv2d(channels_high, channel_out, kernel_size=1, bias=False)
self.bn_high = nn.BatchNorm2d(channel_out)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.ca_conv = nn.Conv2d(channel_out, channel_out, kernel_size=1, bias=False)
self.sa_sigmoid = nn.Sigmoid()
self.ca_sigmoid = nn.Sigmoid()
self.relu = nn.ReLU(inplace=True)
def forward(self, fms_high, fms_low):
"""
:param fms_high: High level Feature map. Tensor.
:param fms_low: Low level Feature map. Tensor.
"""
_, _, h, w = fms_low.shape
#
fms_low = self.conv1x1_low(fms_low)
fms_low= self.bn_low(fms_low)
sa_avg_out = self.sa_sigmoid(self.sa_conv(torch.mean(fms_low, dim=1, keepdim=True)))
#
fms_high = self.conv1x1_high(fms_high)
fms_high = self.bn_high(fms_high)
ca_avg_out = self.ca_sigmoid(self.relu(self.ca_conv(self.avg_pool(fms_high))))
#
fms_high_up = F.interpolate(fms_high, size=(h,w), mode='bilinear', align_corners=True)
fms_sa_att = sa_avg_out * fms_high_up
#
fms_ca_att = ca_avg_out * fms_low
out = fms_ca_att + fms_sa_att
return out
class FPENet(nn.Module):
def __init__(self, classes=19, zero_init_residual=False,
width=16, scales=4, se=False, norm_layer=None):
super(FPENet, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
outplanes = [int(width * 2 ** i) for i in range(3)] # planes=[16,32,64]
self.block_num = [1,3,9]
self.dilation = [1,2,4,8]
self.inplanes = outplanes[0]
self.conv1 = nn.Conv2d(3, outplanes[0], kernel_size=3, stride=2, padding=1,bias=False)
self.bn1 = norm_layer(outplanes[0])
self.relu = nn.ReLU(inplace=True)
self.layer1 = self._make_layer(FPEBlock, outplanes[0], self.block_num[0], dilation=self.dilation,
stride=1, t=1, scales=scales, se=se, norm_layer=norm_layer)
self.layer2 = self._make_layer(FPEBlock, outplanes[1], self.block_num[1], dilation=self.dilation,
stride=2, t=4, scales=scales, se=se, norm_layer=norm_layer)
self.layer3 = self._make_layer(FPEBlock, outplanes[2], self.block_num[2], dilation=self.dilation,
stride=2, t=4, scales=scales, se=se, norm_layer=norm_layer)
self.meu1 = MEUModule(64,32,64)
self.meu2 = MEUModule(64,16,32)
# Projection layer
self.project_layer = nn.Conv2d(32, classes, kernel_size = 1)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# Zero-initialize the last BN in each residual branch,
# so that the residual branch starts with zeros, and each residual block behaves like an identity.
# This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
if zero_init_residual:
for m in self.modules():
if isinstance(m, FPEBlock):
nn.init.constant_(m.bn3.weight, 0)
def _make_layer(self, block, planes, blocks, dilation, stride=1, t=1, scales=4, se=False, norm_layer=None):
if norm_layer is None:
norm_layer = nn.BatchNorm2d
downsample = None
if stride != 1 or self.inplanes != planes:
downsample = nn.Sequential(
conv1x1(self.inplanes, planes, stride),
norm_layer(planes),
)
layers = []
layers.append(block(self.inplanes, planes, dilat=dilation, downsample=downsample, stride=stride, t=t, scales=scales, se=se,
norm_layer=norm_layer))
self.inplanes = planes
for _ in range(1, blocks):
layers.append(block(self.inplanes, planes, dilat=dilation, scales=scales, se=se, norm_layer=norm_layer))
return nn.Sequential(*layers)
def forward(self, x):
## stage 1
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x_1 = self.layer1(x)
## stage 2
x_2_0 = self.layer2[0](x_1)
x_2_1 = self.layer2[1](x_2_0)
x_2_2 = self.layer2[2](x_2_1)
x_2 = x_2_0 + x_2_2
## stage 3
x_3_0 = self.layer3[0](x_2)
x_3_1 = self.layer3[1](x_3_0)
x_3_2 = self.layer3[2](x_3_1)
x_3_3 = self.layer3[3](x_3_2)
x_3_4 = self.layer3[4](x_3_3)
x_3_5 = self.layer3[5](x_3_4)
x_3_6 = self.layer3[6](x_3_5)
x_3_7 = self.layer3[7](x_3_6)
x_3_8 = self.layer3[8](x_3_7)
x_3 = x_3_0 + x_3_8
x2 = self.meu1(x_3, x_2)
x1 = self.meu2(x2, x_1)
output = self.project_layer(x1)
# Bilinear interpolation x2
output = F.interpolate(output,scale_factor=2, mode = 'bilinear', align_corners=True)
return output
"""print layers and params of network"""
if __name__ == '__main__':
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = FPENet(classes=19).to(device)
summary(model,(3,512,1024))
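    # Sketch (not part of the original file): a plain forward pass that checks the output
    # resolution without torchsummary; it assumes torch and the conv3x3 helper defined
    # earlier in this module are available, and the input size is illustrative.
    model.eval()
    with torch.no_grad():
        dummy = torch.randn(1, 3, 512, 1024, device=device)
        logits = model(dummy)
    print(logits.shape)  # expected: torch.Size([1, 19, 512, 1024]), one channel per class at input resolution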
| 36.864542
| 131
| 0.591376
|
88e400539120c067caf6fcaaea7fdd8a86ef63e5
| 3,883
|
py
|
Python
|
venv/Lib/site-packages/botocore/__init__.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
venv/Lib/site-packages/botocore/__init__.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
venv/Lib/site-packages/botocore/__init__.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import re
import logging
__version__ = '1.8.3'
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Configure default logger to do nothing
log = logging.getLogger('botocore')
log.addHandler(NullHandler())
_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
_number_cap_regex = re.compile('([a-z])([0-9]+)')
_end_cap_regex = re.compile('([a-z0-9])([A-Z])')
# The regex below handles the special case where some acronym
# name is pluralized, e.g. GatewayARNs, ListWebACLs, SomeCNAMEs.
_special_case_transform = re.compile('[A-Z]{3,}s$')
# Prepopulate the cache with special cases that don't match
# our regular transformation.
_xform_cache = {
('CreateCachediSCSIVolume', '_'): 'create_cached_iscsi_volume',
('CreateCachediSCSIVolume', '-'): 'create-cached-iscsi-volume',
('DescribeCachediSCSIVolumes', '_'): 'describe_cached_iscsi_volumes',
('DescribeCachediSCSIVolumes', '-'): 'describe-cached-iscsi-volumes',
('DescribeStorediSCSIVolumes', '_'): 'describe_stored_iscsi_volumes',
('DescribeStorediSCSIVolumes', '-'): 'describe-stored-iscsi-volumes',
('CreateStorediSCSIVolume', '_'): 'create_stored_iscsi_volume',
('CreateStorediSCSIVolume', '-'): 'create-stored-iscsi-volume',
('ListHITsForQualificationType', '_'): 'list_hits_for_qualification_type',
('ListHITsForQualificationType', '-'): 'list-hits-for-qualification-type',
}
# The items in this dict represent partial renames to apply globally to all
# services which might have a matching argument or operation. This way a
# common mis-translation can be fixed without having to call out each
# individual case.
_partial_renames = {
'ipv-6': 'ipv6',
'ipv_6': 'ipv6',
}
ScalarTypes = ('string', 'integer', 'boolean', 'timestamp', 'float', 'double')
BOTOCORE_ROOT = os.path.dirname(os.path.abspath(__file__))
# Used to specify anonymous (unsigned) request signature
class UNSIGNED(object):
def __copy__(self):
return self
def __deepcopy__(self, memodict):
return self
UNSIGNED = UNSIGNED()
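# Usage sketch (application code, not part of this module): passing UNSIGNED as the
# signature_version makes clients send anonymous, unsigned requests. Kept as a comment
# here because this file is botocore's package init; the bucket and key names below are
# examples only.
#
# import boto3
# from botocore import UNSIGNED
# from botocore.config import Config
#
# s3 = boto3.client('s3', config=Config(signature_version=UNSIGNED))
# s3.download_file('some-public-bucket', 'object-key', 'local-file')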
def xform_name(name, sep='_', _xform_cache=_xform_cache,
partial_renames=_partial_renames):
"""Convert camel case to a "pythonic" name.
If the name contains the ``sep`` character, then it is
returned unchanged.
"""
if sep in name:
# If the sep is in the name, assume that it's already
# transformed and return the string unchanged.
return name
key = (name, sep)
if key not in _xform_cache:
if _special_case_transform.search(name) is not None:
is_special = _special_case_transform.search(name)
matched = is_special.group()
# Replace something like ARNs, ACLs with _arns, _acls.
name = name[:-len(matched)] + sep + matched.lower()
s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name)
s2 = _number_cap_regex.sub(r'\1' + sep + r'\2', s1)
transformed = _end_cap_regex.sub(r'\1' + sep + r'\2', s2).lower()
# Do partial renames
for old, new in partial_renames.items():
if old in transformed:
transformed = transformed.replace(old, new)
_xform_cache[key] = transformed
return _xform_cache[key]
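# Illustrative checks of the transformation rules above (not part of the original module);
# each expected value follows directly from the regexes and the prepopulated cache.
assert xform_name('DescribeInstances') == 'describe_instances'
assert xform_name('ListWebACLs') == 'list_web_acls'                            # pluralized acronym
assert xform_name('CreateCachediSCSIVolume') == 'create_cached_iscsi_volume'   # cached special case
assert xform_name('already_transformed') == 'already_transformed'              # contains sep, returned unchanged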
| 36.980952
| 78
| 0.689673
|
58c88f0a21700a7ea78c511beac9b47c763ba875
| 1,159
|
py
|
Python
|
release_files/setup_yo_fluq_ds.py
|
okulovsky/yo_ds
|
9e1fa2e7a1b9746c3982afc152c024169fec45ca
|
[
"MIT"
] | 16
|
2019-09-26T09:05:42.000Z
|
2021-02-04T01:39:09.000Z
|
release_files/setup_yo_fluq_ds.py
|
okulovsky/yo_ds
|
9e1fa2e7a1b9746c3982afc152c024169fec45ca
|
[
"MIT"
] | 2
|
2019-10-23T19:01:23.000Z
|
2020-06-11T09:08:45.000Z
|
release_files/setup_yo_fluq_ds.py
|
okulovsky/yo_ds
|
9e1fa2e7a1b9746c3982afc152c024169fec45ca
|
[
"MIT"
] | 2
|
2019-09-26T09:05:50.000Z
|
2019-10-23T18:46:11.000Z
|
from setuptools import setup, find_packages
def readme():
with open('README.md') as file:
return file.read()
setup(name='yo_fluq_ds',
version='VERSIONID',
description='Fluent interface for data processing, advanced toolkit for data science',
long_description=readme(),
long_description_content_type='text/markdown',
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules'
],
url='http://github.com/okulovsky/yo_ds',
author='Yuri Okulovsky',
author_email='yuri.okulovsky@gmail.com',
license='MIT',
packages=find_packages(),
install_requires=[
'pandas',
'matplotlib',
'numpy',
'tqdm',
'seaborn',
'pyaml',
'jsonpickle',
'ipython',
'ipywidgets',
'sklearn',
'yo_fluq==VERSIONID'
],
include_package_data = True,
zip_safe=False
)
| 29.717949
| 92
| 0.5522
|
58a2bea34786300a4b362fd68d15d1fcfbaf08fb
| 952
|
py
|
Python
|
MagicMirror/display/migrations/0001_initial.py
|
hu-tianyi/Magic-MIrror-Display
|
d0403118c87a02468be2fb01f8c9565c60786115
|
[
"MIT"
] | 2
|
2019-04-20T17:30:48.000Z
|
2019-09-10T23:17:02.000Z
|
MagicMirror/display/migrations/0001_initial.py
|
hu-tianyi/Magic-MIrror-Display
|
d0403118c87a02468be2fb01f8c9565c60786115
|
[
"MIT"
] | null | null | null |
MagicMirror/display/migrations/0001_initial.py
|
hu-tianyi/Magic-MIrror-Display
|
d0403118c87a02468be2fb01f8c9565c60786115
|
[
"MIT"
] | 1
|
2019-04-25T09:58:17.000Z
|
2019-04-25T09:58:17.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-19 15:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Devices',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('location', models.IntegerField()),
('name', models.CharField(max_length=8)),
('datetime', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Tips',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tip', models.CharField(max_length=20)),
],
),
]
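# For reference, a models.py that would generate this initial migration looks roughly like the
# sketch below (kept as a comment because model classes must live in an installed app, not in a
# migration module):
#
# from django.db import models
#
# class Devices(models.Model):
#     location = models.IntegerField()
#     name = models.CharField(max_length=8)
#     datetime = models.DateTimeField(auto_now_add=True)
#
# class Tips(models.Model):
#     tip = models.CharField(max_length=20)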
| 28.848485
| 114
| 0.563025
|
3ed7b5a06a2757418eda8cd3566262b4653670a4
| 4,194
|
py
|
Python
|
benchmark/startQiskit_Class2520.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_Class2520.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_Class2520.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=4
# total number=37
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
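# Illustrative check of the helpers above (not part of the original benchmark file):
# bitwise_xor XORs the strings bit by bit and returns the result reversed, while
# bitwise_dot is a mod-2 dot product of the two bit strings.
assert bitwise_xor('101', '011') == '011'  # raw XOR is '110', returned reversed
assert bitwise_dot('101', '011') == '1'    # (1*0 + 0*1 + 1*1) % 2 = 1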
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.cx(input_qubit[0],input_qubit[3]) # number=13
prog.cx(input_qubit[0],input_qubit[3]) # number=17
prog.x(input_qubit[3]) # number=18
prog.cx(input_qubit[0],input_qubit[3]) # number=19
prog.cx(input_qubit[0],input_qubit[3]) # number=15
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[1]) # number=31
prog.cz(input_qubit[2],input_qubit[1]) # number=32
prog.h(input_qubit[1]) # number=33
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.y(input_qubit[3]) # number=12
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[0]) # number=24
prog.cz(input_qubit[3],input_qubit[0]) # number=25
prog.h(input_qubit[0]) # number=26
prog.cx(input_qubit[3],input_qubit[0]) # number=28
prog.z(input_qubit[3]) # number=29
prog.cx(input_qubit[3],input_qubit[0]) # number=30
prog.x(input_qubit[2]) # number=23
prog.cx(input_qubit[3],input_qubit[0]) # number=22
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.y(input_qubit[2]) # number=10
prog.y(input_qubit[2]) # number=11
prog.x(input_qubit[3]) # number=36
prog.cx(input_qubit[3],input_qubit[0]) # number=34
prog.cx(input_qubit[3],input_qubit[0]) # number=35
# circuit end
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = BasicAer.get_backend('statevector_simulator')
    sample_shot = 8000
info = execute(prog, backend=backend).result().get_statevector()
qubits = round(log2(len(info)))
info = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_Class2520.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 34.661157
| 140
| 0.648069
|
078f84aebbc3407471c5a8faa229c6474a406dda
| 15,272
|
py
|
Python
|
test/unit/common/test_daemon.py
|
fossabot/swift-1
|
63fc013b8b96484cede0e9901ad54676b8c93298
|
[
"Apache-2.0"
] | 1
|
2021-09-30T14:00:22.000Z
|
2021-09-30T14:00:22.000Z
|
test/unit/common/test_daemon.py
|
fossabot/swift-1
|
63fc013b8b96484cede0e9901ad54676b8c93298
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
test/unit/common/test_daemon.py
|
fossabot/swift-1
|
63fc013b8b96484cede0e9901ad54676b8c93298
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from six import StringIO
import time
import unittest
from getpass import getuser
import logging
from test.unit import tmpfile
import mock
import signal
from contextlib import contextmanager
import itertools
from collections import defaultdict
import errno
from swift.common import daemon, utils
from test.unit import debug_logger
class MyDaemon(daemon.Daemon):
def __init__(self, conf):
self.conf = conf
self.logger = debug_logger('my-daemon')
MyDaemon.forever_called = False
MyDaemon.once_called = False
def run_forever(self):
MyDaemon.forever_called = True
def run_once(self):
MyDaemon.once_called = True
def run_raise(self):
raise OSError
def run_quit(self):
raise KeyboardInterrupt
class TestDaemon(unittest.TestCase):
def test_create(self):
d = daemon.Daemon({})
self.assertEqual(d.conf, {})
self.assertTrue(isinstance(d.logger, utils.LogAdapter))
def test_stubs(self):
d = daemon.Daemon({})
self.assertRaises(NotImplementedError, d.run_once)
self.assertRaises(NotImplementedError, d.run_forever)
class MyWorkerDaemon(MyDaemon):
def __init__(self, *a, **kw):
super(MyWorkerDaemon, self).__init__(*a, **kw)
MyWorkerDaemon.post_multiprocess_run_called = False
def get_worker_args(self, once=False, **kwargs):
return [kwargs for i in range(int(self.conf.get('workers', 0)))]
def is_healthy(self):
try:
return getattr(self, 'health_side_effects', []).pop(0)
except IndexError:
return True
def post_multiprocess_run(self):
MyWorkerDaemon.post_multiprocess_run_called = True
class TestWorkerDaemon(unittest.TestCase):
def test_stubs(self):
d = daemon.Daemon({})
self.assertRaises(NotImplementedError, d.run_once)
self.assertRaises(NotImplementedError, d.run_forever)
self.assertEqual([], d.get_worker_args())
self.assertEqual(True, d.is_healthy())
def test_my_worker_daemon(self):
d = MyWorkerDaemon({})
self.assertEqual([], d.get_worker_args())
self.assertTrue(d.is_healthy())
d = MyWorkerDaemon({'workers': '3'})
self.assertEqual([{'key': 'val'}] * 3, d.get_worker_args(key='val'))
d.health_side_effects = [True, False]
self.assertTrue(d.is_healthy())
self.assertFalse(d.is_healthy())
self.assertTrue(d.is_healthy())
class TestRunDaemon(unittest.TestCase):
def setUp(self):
for patcher in [
mock.patch.object(utils, 'HASH_PATH_PREFIX', b'startcap'),
mock.patch.object(utils, 'HASH_PATH_SUFFIX', b'endcap'),
mock.patch.object(utils, 'drop_privileges', lambda *args: None),
mock.patch.object(utils, 'capture_stdio', lambda *args: None),
]:
patcher.start()
self.addCleanup(patcher.stop)
def test_run(self):
d = MyDaemon({})
self.assertFalse(MyDaemon.forever_called)
self.assertFalse(MyDaemon.once_called)
# test default
d.run()
self.assertEqual(d.forever_called, True)
# test once
d.run(once=True)
self.assertEqual(d.once_called, True)
def test_signal(self):
d = MyDaemon({})
with mock.patch('swift.common.daemon.signal') as mock_signal:
mock_signal.SIGTERM = signal.SIGTERM
daemon.DaemonStrategy(d, d.logger).run()
signal_args, kwargs = mock_signal.signal.call_args
sig, func = signal_args
self.assertEqual(sig, signal.SIGTERM)
with mock.patch('swift.common.daemon.os') as mock_os:
func()
self.assertEqual(mock_os.method_calls, [
mock.call.killpg(0, signal.SIGTERM),
# hard exit because bare except handlers can trap SystemExit
mock.call._exit(0)
])
def test_run_daemon(self):
sample_conf = "[my-daemon]\nuser = %s\n" % getuser()
with tmpfile(sample_conf) as conf_file, \
mock.patch('swift.common.daemon.use_hub') as mock_use_hub:
with mock.patch.dict('os.environ', {'TZ': ''}), \
mock.patch('time.tzset') as mock_tzset:
daemon.run_daemon(MyDaemon, conf_file)
self.assertTrue(MyDaemon.forever_called)
self.assertEqual(os.environ['TZ'], 'UTC+0')
self.assertEqual(mock_tzset.mock_calls, [mock.call()])
self.assertEqual(mock_use_hub.mock_calls,
[mock.call(utils.get_hub())])
daemon.run_daemon(MyDaemon, conf_file, once=True)
self.assertEqual(MyDaemon.once_called, True)
# test raise in daemon code
with mock.patch.object(MyDaemon, 'run_once', MyDaemon.run_raise):
self.assertRaises(OSError, daemon.run_daemon, MyDaemon,
conf_file, once=True)
# test user quit
sio = StringIO()
logger = logging.getLogger('server')
logger.addHandler(logging.StreamHandler(sio))
logger = utils.get_logger(None, 'server', log_route='server')
with mock.patch.object(MyDaemon, 'run_forever', MyDaemon.run_quit):
daemon.run_daemon(MyDaemon, conf_file, logger=logger)
self.assertTrue('user quit' in sio.getvalue().lower())
# test missing section
sample_conf = "[default]\nuser = %s\n" % getuser()
with tmpfile(sample_conf) as conf_file:
self.assertRaisesRegexp(SystemExit,
'Unable to find my-daemon '
'config section in.*',
daemon.run_daemon, MyDaemon,
conf_file, once=True)
def test_run_daemon_diff_tz(self):
old_tz = os.environ.get('TZ', '')
try:
os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
time.tzset()
self.assertEqual((1970, 1, 1, 0, 0, 0), time.gmtime(0)[:6])
self.assertEqual((1969, 12, 31, 19, 0, 0), time.localtime(0)[:6])
self.assertEqual(18000, time.timezone)
sample_conf = "[my-daemon]\nuser = %s\n" % getuser()
with tmpfile(sample_conf) as conf_file, \
mock.patch('swift.common.daemon.use_hub'):
daemon.run_daemon(MyDaemon, conf_file)
self.assertFalse(MyDaemon.once_called)
self.assertTrue(MyDaemon.forever_called)
self.assertEqual((1970, 1, 1, 0, 0, 0), time.gmtime(0)[:6])
self.assertEqual((1970, 1, 1, 0, 0, 0), time.localtime(0)[:6])
self.assertEqual(0, time.timezone)
finally:
os.environ['TZ'] = old_tz
time.tzset()
@contextmanager
def mock_os(self, child_worker_cycles=3):
self.waitpid_calls = defaultdict(int)
def mock_waitpid(p, *args):
self.waitpid_calls[p] += 1
if self.waitpid_calls[p] >= child_worker_cycles:
rv = p
else:
rv = 0
return rv, 0
with mock.patch('swift.common.daemon.os.fork') as mock_fork, \
mock.patch('swift.common.daemon.os.waitpid', mock_waitpid), \
mock.patch('swift.common.daemon.os.kill') as mock_kill:
mock_fork.side_effect = (
'mock-pid-%s' % i for i in itertools.count())
self.mock_fork = mock_fork
self.mock_kill = mock_kill
yield
def test_fork_workers(self):
d = MyWorkerDaemon({'workers': 3})
strategy = daemon.DaemonStrategy(d, d.logger)
with self.mock_os():
strategy.run(once=True)
self.assertEqual([mock.call()] * 3, self.mock_fork.call_args_list)
self.assertEqual(self.waitpid_calls, {
'mock-pid-0': 3,
'mock-pid-1': 3,
'mock-pid-2': 3,
})
self.assertEqual([], self.mock_kill.call_args_list)
self.assertIn('Finished', d.logger.get_lines_for_level('notice')[-1])
self.assertTrue(MyWorkerDaemon.post_multiprocess_run_called)
def test_forked_worker(self):
d = MyWorkerDaemon({'workers': 3})
strategy = daemon.DaemonStrategy(d, d.logger)
with mock.patch('swift.common.daemon.os.fork') as mock_fork, \
mock.patch('swift.common.daemon.os._exit') as mock_exit:
mock_fork.return_value = 0
mock_exit.side_effect = SystemExit
self.assertRaises(SystemExit, strategy.run, once=True)
self.assertTrue(d.once_called)
def test_restart_workers(self):
d = MyWorkerDaemon({'workers': 3})
strategy = daemon.DaemonStrategy(d, d.logger)
d.health_side_effects = [True, False]
with self.mock_os():
self.mock_kill.side_effect = lambda *args, **kwargs: setattr(
strategy, 'running', False)
strategy.run()
# six workers forked in total
self.assertEqual([mock.call()] * 6, self.mock_fork.call_args_list)
# since the daemon starts healthy, first pass checks children once
self.assertEqual(self.waitpid_calls, {
'mock-pid-0': 1,
'mock-pid-1': 1,
'mock-pid-2': 1,
})
# second pass is not healthy, original pid's killed
self.assertEqual(set([
('mock-pid-0', signal.SIGTERM),
('mock-pid-1', signal.SIGTERM),
('mock-pid-2', signal.SIGTERM),
]), set(c[0] for c in self.mock_kill.call_args_list[:3]))
# our mock_kill side effect breaks out of running, and cleanup kills
# remaining pids
self.assertEqual(set([
('mock-pid-3', signal.SIGTERM),
('mock-pid-4', signal.SIGTERM),
('mock-pid-5', signal.SIGTERM),
]), set(c[0] for c in self.mock_kill.call_args_list[3:]))
def test_worker_disappears(self):
d = MyWorkerDaemon({'workers': 3})
strategy = daemon.DaemonStrategy(d, d.logger)
strategy.register_worker_start('mock-pid', {'mock_options': True})
self.assertEqual(strategy.unspawned_worker_options, [])
self.assertEqual(strategy.options_by_pid, {
'mock-pid': {'mock_options': True}
})
# still running
with mock.patch('swift.common.daemon.os.waitpid') as mock_waitpid:
mock_waitpid.return_value = (0, 0)
strategy.check_on_all_running_workers()
self.assertEqual(strategy.unspawned_worker_options, [])
self.assertEqual(strategy.options_by_pid, {
'mock-pid': {'mock_options': True}
})
# finished
strategy = daemon.DaemonStrategy(d, d.logger)
strategy.register_worker_start('mock-pid', {'mock_options': True})
with mock.patch('swift.common.daemon.os.waitpid') as mock_waitpid:
mock_waitpid.return_value = ('mock-pid', 0)
strategy.check_on_all_running_workers()
self.assertEqual(strategy.unspawned_worker_options, [
{'mock_options': True}])
self.assertEqual(strategy.options_by_pid, {})
self.assertEqual(d.logger.get_lines_for_level('debug')[-1],
'Worker mock-pid exited')
# disappeared
strategy = daemon.DaemonStrategy(d, d.logger)
strategy.register_worker_start('mock-pid', {'mock_options': True})
with mock.patch('swift.common.daemon.os.waitpid') as mock_waitpid:
mock_waitpid.side_effect = OSError(
errno.ECHILD, os.strerror(errno.ECHILD))
mock_waitpid.return_value = ('mock-pid', 0)
strategy.check_on_all_running_workers()
self.assertEqual(strategy.unspawned_worker_options, [
{'mock_options': True}])
self.assertEqual(strategy.options_by_pid, {})
self.assertEqual(d.logger.get_lines_for_level('notice')[-1],
'Worker mock-pid died')
def test_worker_kills_pids_in_cleanup(self):
d = MyWorkerDaemon({'workers': 2})
strategy = daemon.DaemonStrategy(d, d.logger)
strategy.register_worker_start('mock-pid-1', {'mock_options': True})
strategy.register_worker_start('mock-pid-2', {'mock_options': True})
self.assertEqual(strategy.unspawned_worker_options, [])
self.assertEqual(strategy.options_by_pid, {
'mock-pid-1': {'mock_options': True},
'mock-pid-2': {'mock_options': True},
})
with mock.patch('swift.common.daemon.os.kill') as mock_kill:
strategy.cleanup()
self.assertEqual(strategy.unspawned_worker_options, [
{'mock_options': True}] * 2)
self.assertEqual(strategy.options_by_pid, {})
self.assertEqual(set([
('mock-pid-1', signal.SIGTERM),
('mock-pid-2', signal.SIGTERM),
]), set(c[0] for c in mock_kill.call_args_list))
self.assertEqual(set(d.logger.get_lines_for_level('debug')[-2:]),
set(['Cleaned up worker mock-pid-1',
'Cleaned up worker mock-pid-2']))
def test_worker_disappears_in_cleanup(self):
d = MyWorkerDaemon({'workers': 2})
strategy = daemon.DaemonStrategy(d, d.logger)
strategy.register_worker_start('mock-pid-1', {'mock_options': True})
strategy.register_worker_start('mock-pid-2', {'mock_options': True})
self.assertEqual(strategy.unspawned_worker_options, [])
self.assertEqual(strategy.options_by_pid, {
'mock-pid-1': {'mock_options': True},
'mock-pid-2': {'mock_options': True},
})
with mock.patch('swift.common.daemon.os.kill') as mock_kill:
mock_kill.side_effect = [None, OSError(errno.ECHILD,
os.strerror(errno.ECHILD))]
strategy.cleanup()
self.assertEqual(strategy.unspawned_worker_options, [
{'mock_options': True}] * 2)
self.assertEqual(strategy.options_by_pid, {})
self.assertEqual(set([
('mock-pid-1', signal.SIGTERM),
('mock-pid-2', signal.SIGTERM),
]), set(c[0] for c in mock_kill.call_args_list))
self.assertEqual(set(d.logger.get_lines_for_level('debug')[-2:]),
set(['Cleaned up worker mock-pid-1',
'Cleaned up worker mock-pid-2']))
if __name__ == '__main__':
unittest.main()
| 40.725333
| 79
| 0.60516
|
9c9ea4c874efbef43b37b72a2fb5a3c644f933e1
| 2,169
|
py
|
Python
|
idaes/tests/test_style.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 112
|
2019-02-11T23:16:36.000Z
|
2022-03-23T20:59:57.000Z
|
idaes/tests/test_style.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 621
|
2019-03-01T14:44:12.000Z
|
2022-03-31T19:49:25.000Z
|
idaes/tests/test_style.py
|
eslickj/idaes-pse
|
328ed07ffb0b4d98c03e972675ea32c41dd2531a
|
[
"RSA-MD"
] | 154
|
2019-02-01T23:46:33.000Z
|
2022-03-23T15:07:10.000Z
|
#################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
Tests for Python code style.
"""
import logging
import os
from pathlib import Path
import subprocess
import pytest
_log = logging.getLogger(__name__)
# The most stylish dirs in the project
DIRS = [
str(p)
for p in (
Path("idaes/dmf"),
# Path("apps/ddm-learning/alamo_python/alamopy"),
# Path("apps/ddm-learning/ripe_python/ripe"),
)
]
STYLE_CHECK_CMD = "flake8"
@pytest.mark.unit
def test_flake8():
cwd = os.getcwd()
for d in DIRS:
path = os.path.join(cwd, d)
if not os.path.exists(path):
_log.warning(
f"Target path '{d}' not found in current dir, '{cwd}'. " "Skipping test"
)
continue
if not os.path.isdir(path):
_log.warning(
f"Target path '{d}' in current dir, '{cwd}', is not a directory. "
"Skipping test"
)
continue
cmd = [STYLE_CHECK_CMD, d]
_log.info(f"Test code style with command '{' '.join(cmd)}'")
try:
proc = subprocess.Popen(cmd)
except FileNotFoundError:
_log.warning(
f"Style checker {STYLE_CHECK_CMD} not found. Skipping style tests"
)
break
proc.wait()
status = proc.returncode
assert status == 0, f"Style checker '{STYLE_CHECK_CMD}' had errors for {path}"
| 32.373134
| 88
| 0.582296
|
5487e19c11152b59b2fb6a7cb8664973aafef48b
| 4,371
|
py
|
Python
|
chapter3/hw3-2and3-3 3/blogPostDAO.py
|
Baw25/MongoStuff
|
0a6f35505948d36b3bf1b07a1a75709babaeca71
|
[
"MIT"
] | null | null | null |
chapter3/hw3-2and3-3 3/blogPostDAO.py
|
Baw25/MongoStuff
|
0a6f35505948d36b3bf1b07a1a75709babaeca71
|
[
"MIT"
] | null | null | null |
chapter3/hw3-2and3-3 3/blogPostDAO.py
|
Baw25/MongoStuff
|
0a6f35505948d36b3bf1b07a1a75709babaeca71
|
[
"MIT"
] | null | null | null |
__author__ = 'aje'
#
# Copyright (c) 2008 - 2013 10gen, Inc. <http://10gen.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import sys
import re
import datetime
# The Blog Post Data Access Object handles interactions with the Posts collection
class BlogPostDAO:
# constructor for the class
def __init__(self, database):
self.db = database
self.posts = database.posts
# inserts the blog entry and returns a permalink for the entry
def insert_entry(self, title, post, tags_array, author):
print "inserting blog entry", title, post
# fix up the permalink to not include whitespace
        exp = re.compile('\W') # match anything that is not a word character (letters, digits, underscore)
whitespace = re.compile('\s')
temp_title = whitespace.sub("_",title)
permalink = exp.sub('', temp_title)
# Build a new post
post = {"title": title,
"author": author,
"body": post,
"permalink":permalink,
"tags": tags_array,
"comments": [],
"date": datetime.datetime.utcnow()}
# now insert the post
try:
# XXX HW 3.2 Work Here to insert the post
print "Inserting the post"
self.posts.insert(post)
except:
print "Error inserting post"
print "Unexpected error:", sys.exc_info()[0]
return permalink
# returns an array of num_posts posts, reverse ordered by date.
# db.collection.update(
# { "_id": ID, "playlists._id": "58"},
# { "$push":
# {"playlists.$.musics":
# {
# "name": "test name",
# "duration": "4.00"
# }
# }
# }
# )
def get_posts(self, num_posts):
        cursor = iter(()) # Using an empty iterable as a placeholder so the blog compiles before you make your changes
# XXX HW 3.2 Work here to get the posts
cursor = self.posts.find({})
l = []
for post in cursor:
post['date'] = post['date'].strftime("%A, %B %d %Y at %I:%M%p") # fix up date
if 'tags' not in post:
post['tags'] = [] # fill it in if its not there already
if 'comments' not in post:
post['comments'] = []
l.append({'title':post['title'], 'body':post['body'], 'post_date':post['date'],
'permalink':post['permalink'],
'tags':post['tags'],
'author':post['author'],
'comments':post['comments']})
return l
# find a post corresponding to a particular permalink
def get_post_by_permalink(self, permalink):
post = None
# XXX 3.2 Work here to retrieve the specified post
        post = self.posts.find_one({'permalink': permalink})  # find_one returns None if there is no match
if post is not None:
# fix up date
post['date'] = post['date'].strftime("%A, %B %d %Y at %I:%M%p")
return post
# add a comment to a particular blog post
def add_comment(self, permalink, name, email, body):
comment = {'author': name, 'body': body}
if (email != ""):
comment['email'] = email
try:
# XXX HW 3.3 Work here to add the comment to the designated post. When done, modify the line below to return the number of documents updated by your modification, rather than just -1.
            result = self.posts.update(
                {'permalink': permalink},
                {"$push":
                    {"comments": comment}
                }
            )
            # the acknowledged write result reports the number of documents matched/updated in 'n'
            return result['n']
except:
print "Could not update the collection, error"
print "Unexpected error:", sys.exc_info()[0]
return 0
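# Hypothetical usage sketch (not part of the homework file); it assumes a local mongod on the
# default port, and the database name and post values are examples only.
if __name__ == '__main__':
    import pymongo
    dao = BlogPostDAO(pymongo.MongoClient('mongodb://localhost')['blog'])
    permalink = dao.insert_entry("Hello world", "First post body", ["intro"], "aje")
    print dao.get_post_by_permalink(permalink)['title']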
| 32.139706
| 195
| 0.565088
|
a41832bd44808b78900521ed2779817af59dd797
| 1,849
|
py
|
Python
|
flakeheaven/_logic/_config.py
|
snmishra/flakeheaven
|
20f94457744c47d965d4520d3b22def538b0cc49
|
[
"MIT"
] | 1
|
2022-02-07T14:47:48.000Z
|
2022-02-07T14:47:48.000Z
|
flakeheaven/_logic/_config.py
|
snmishra/flakeheaven
|
20f94457744c47d965d4520d3b22def538b0cc49
|
[
"MIT"
] | null | null | null |
flakeheaven/_logic/_config.py
|
snmishra/flakeheaven
|
20f94457744c47d965d4520d3b22def538b0cc49
|
[
"MIT"
] | null | null | null |
# built-in
from pathlib import Path
from typing import Any, Dict
# external
import toml
import urllib3
from flake8.utils import normalize_paths
def read_config(*paths) -> Dict[str, Any]:
config = dict() # type: Dict[str, Any]
for path in paths:
if isinstance(path, Path):
new_config = _read_local(path)
elif path.startswith(('https://', 'http://')):
new_config = _read_remote(path)
elif Path(path).exists():
new_config = _read_local(Path(path))
else:
new_config = _read_remote(path)
config = _merge_configs(config, new_config)
return config
def _read_local(path: Path) -> Dict[str, Any]:
with path.open('r') as stream:
return _parse_config(stream.read())
def _read_remote(url: str) -> Dict[str, Any]:
http = urllib3.PoolManager()
response = http.request('GET', url)
return _parse_config(response.data.decode())
def _merge_configs(*configs) -> Dict[str, Any]:
config = dict()
for subconfig in configs:
config.update(subconfig)
for section in ('plugins', 'exceptions'):
config[section] = dict()
for subconfig in configs:
config[section].update(subconfig.get(section, {}))
return config
def _parse_config(content: str) -> Dict[str, Any]:
config = toml.loads(content).get('tool', {}).get('flakeheaven', {})
config = dict(config)
for section in ('plugins', 'exceptions'):
if section in config:
config[section] = dict(config[section])
if 'base' in config:
paths = config['base']
if not isinstance(paths, list):
paths = [paths]
config = _merge_configs(read_config(*paths), config)
if 'exclude' in config:
config['exclude'] = normalize_paths(config['exclude'])
return config
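# Illustrative sketch (not part of the module); the URL and local file name below are hypothetical.
# Later sources win for plain keys, while the 'plugins' and 'exceptions' tables are merged key by key.
if __name__ == '__main__':
    merged = read_config(
        'https://example.com/shared-flakeheaven.toml',  # remote base, fetched via urllib3
        Path('pyproject.toml'),                         # local file, overrides the base
    )
    print(merged.get('plugins', {}))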
| 27.191176
| 71
| 0.624121
|
1e695a14c67a65720abd119b9f4083c31851b58f
| 12,461
|
py
|
Python
|
Tensile/KernelWriterConversion.py
|
nielenventer/Tensile
|
3625c894aa73533453b2eac15285174afce4e2dd
|
[
"MIT"
] | null | null | null |
Tensile/KernelWriterConversion.py
|
nielenventer/Tensile
|
3625c894aa73533453b2eac15285174afce4e2dd
|
[
"MIT"
] | null | null | null |
Tensile/KernelWriterConversion.py
|
nielenventer/Tensile
|
3625c894aa73533453b2eac15285174afce4e2dd
|
[
"MIT"
] | null | null | null |
################################################################################
# Copyright 2020-2021 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell cop-
# ies of the Software, and to permit persons to whom the Software is furnished
# to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IM-
# PLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNE-
# CTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
################################################################################
from copy import deepcopy
from .Common import globalParameters, CHeader
from .KernelWriterBase import KernelWriterBase
class KernelWriterConversion(KernelWriterBase):
def __init__(self, state):
super().__init__()
self.state["ProblemType"] = deepcopy(state["ProblemType"])
self.state["_GlobalAccumulation"] = state["_GlobalAccumulation"]
# derive parameter
self.language = "HIP"
self.kernelName = self.getKernelName()
self.datatype = self.state["ProblemType"]["ComputeDataType"].toDevice(self.language)
# determine chars for fast access
self.indexChars = []
for i in range(0, len(globalParameters["IndexChars"])):
self.indexChars.append(globalParameters["IndexChars"][i])
self.indexChars[self.state["ProblemType"]["Index0"]] = "0" + self.indexChars[self.state["ProblemType"]["Index0"]]
self.indexChars[self.state["ProblemType"]["Index1"]] = "1" + self.indexChars[self.state["ProblemType"]["Index1"]]
self.tileChar0 = self.indexChars[self.state["ProblemType"]["Index0"]]
self.tileChar1 = self.indexChars[self.state["ProblemType"]["Index1"]]
def functionSignature(self):
kStr = ""
# kernel name
kStr += self.endLine
kStr += "extern \"C\"\n"
kStr += "__global__ "
kStr += "void %s" % ( self.kernelName )
kStr += "(" + self.endLine
# pointers
ptrStr = self.state["ProblemType"]["DestDataType"].toDevice(self.language)
ptrStr += '' if self.state["ProblemType"]["StridedBatched"] else '*'
bStr = '' if self.state["ProblemType"]["StridedBatched"] else 'Batch'
kStr += " " + ptrStr + " * " + bStr + "D," + self.endLine
kStr += " " + self.datatype + " * W," + self.endLine
kStr += " " + ptrStr + " const * " + bStr + "C," + self.endLine
# alpha & beta
kStr += " %s const alpha,%s" % (self.state["ProblemType"]["ComputeDataType"].toDevice(self.language), self.endLine)
kStr += " %s const beta,%s" % (self.state["ProblemType"]["ComputeDataType"].toDevice(self.language), self.endLine)
# strides
firstStrideCD = 1
if self.state["ProblemType"]["UseInitialStridesCD"]:
firstStrideCD = 0
lastStrideC = self.state["ProblemType"]["NumIndicesC"]
for i in range(firstStrideCD, lastStrideC):
kStr += " unsigned int const strideD%s,%s" % (self.indexChars[i], self.endLine)
for i in range(firstStrideCD, lastStrideC):
kStr += " unsigned int const strideW%s,%s" % (self.indexChars[i], self.endLine)
for i in range(firstStrideCD, lastStrideC):
kStr += " unsigned int const strideC%s,%s" % (self.indexChars[i], self.endLine)
# sizes
for i in range(0, self.state["ProblemType"]["NumIndicesC"]):
kStr += " unsigned int const size%s,%s" % (self.indexChars[i], self.endLine)
# offset
kStr += " unsigned int offsetD,%s" % self.endLine
kStr += " unsigned int offsetC,%s" % self.endLine
# gsu
kStr += " unsigned int const gsu)%s" % self.endLine
return kStr
def kernelBody(self):
kStr = ""
kStr += "{%s" % self.endLine
problemType = self.state["ProblemType"]
########################################
# defined initial strides
firstStride = 0
if problemType["UseInitialStridesCD"]:
# no strides #defined
lastStrideC = 0
assert 0 # need to fix beta-clear routine to pass initial stride parms
else:
# #define initial stride
kStr += "/* hard-coded initial strides */%s" % self.endLine
lastStrideC = 1
for i in range(firstStride, lastStrideC):
kStr += "#define strideD" + self.indexChars[i] + " 1" + self.endLine
for i in range(firstStride, lastStrideC):
kStr += "#define strideW" + self.indexChars[i] + " 1" + self.endLine
for i in range(firstStride, lastStrideC):
kStr += "#define strideC" + self.indexChars[i] + " 1" + self.endLine
########################################
# GLOBAL_D()
kStr += "#define GLOBAL_D(IDX%s" % self.indexChars[0]
for i in range(1, problemType["NumIndicesC"]):
kStr += ", IDX%s" % self.indexChars[i]
indexChar = self.indexChars[0]
kStr += ") (( (IDX%s)*strideD%s" % (indexChar, indexChar)
for i in range(1, problemType["NumIndicesC"]):
indexChar = self.indexChars[i]
kStr += " + (IDX%s)*strideD%s" % (indexChar, indexChar)
kStr += " ))" + self.endLine
# GLOBAL_W()
kStr += "#define GLOBAL_W(IDX%s" % self.indexChars[0]
for i in range(1, problemType["NumIndicesC"]):
kStr += ", IDX%s" % self.indexChars[i]
indexChar = self.indexChars[0]
kStr += ") (( (IDX%s)*strideW%s" % (indexChar, indexChar)
for i in range(1, problemType["NumIndicesC"]):
indexChar = self.indexChars[i]
kStr += " + (IDX%s)*strideW%s" % (indexChar, indexChar)
kStr += " ))" + self.endLine
# GLOBAL_C()
kStr += "#define GLOBAL_C(IDX%s" % self.indexChars[0]
for i in range(1, problemType["NumIndicesC"]):
kStr += ", IDX%s" % self.indexChars[i]
indexChar = self.indexChars[0]
kStr += ") (( (IDX%s)*strideC%s" % (indexChar, indexChar)
for i in range(1, problemType["NumIndicesC"]):
indexChar = self.indexChars[i]
kStr += " + (IDX%s)*strideC%s" % (indexChar, indexChar)
kStr += " ))" + self.endLine
########################################
# multi buffers GSU: Accumulate all GSU buffer
indexChar = self.indexChars[0]
kStr += " uint64_t id = %s(0);%s" % (self.getGlobalIdStr, self.endLine)
kStr += " if (id >= (size%s" % self.indexChars[0]
for i in range(1, problemType["NumIndicesC"]):
kStr += "*size%s" % self.indexChars[i]
kStr += "))%s" % self.endLine
kStr += " return;%s" % self.endLine
kStr += self.endLine
kStr += " uint64_t id0"
for i in range(1, problemType["NumIndicesC"]):
kStr += ", id%d" % i
kStr += ";%s" % self.endLine
for i in range(0, problemType["NumIndicesC"]):
kStr += " id%d = id %% size%s;%s" % (i, self.indexChars[i], self.endLine)
kStr += " id = id / size%s;%s" % (self.indexChars[i], self.endLine)
nonTileFreeIndices = []
########################################
# apply batch
if not self.state["ProblemType"]["StridedBatched"]:
nonTileFreeIndices = list(range(0, self.state["ProblemType"]["NumIndicesC"]))
nonTileFreeIndices.remove(self.state["ProblemType"]["Index0"])
nonTileFreeIndices.remove(self.state["ProblemType"]["Index1"])
kStr += self.endLine
kStr += " uint64_t wg = 0"
batchStride = "1"
for i in nonTileFreeIndices:
kStr += " + id%d * %s " % (i, batchStride)
batchStride += " * size%s" % self.indexChars[i]
kStr += ";" + self.endLine
ptrStr = self.state["ProblemType"]["DestDataType"].toDevice(self.language)
kStr += " " + ptrStr + " * D = BatchD[wg];" + self.endLine
ptrStr = self.state["ProblemType"]["DestDataType"].toDevice(self.language)
zeroStr = self.state["ProblemType"]["ComputeDataType"].zeroString(self.language, 1)
kStr += " " + ptrStr + f" const* C = (beta == {zeroStr}) ? nullptr : BatchC[wg];" + self.endLine
########################################
# apply offset
kStr += self.endLine
kStr += " D = D + offsetD;" + self.endLine
kStr += " C = C + offsetC;" + self.endLine
########################################
# D index
kStr += self.endLine
kStr += " %s idxD = GLOBAL_D( (%s)" % (self.uint64Str, self.uint64Str)
for i in range(problemType["NumIndicesC"]):
kStr += ', ' if i else ''
kStr += '0' if i in nonTileFreeIndices else ('id%d' % i)
kStr += ");%s" % (self.endLine)
# W index
kStr += " %s idxW = GLOBAL_W( (%s)" % (self.uint64Str, self.uint64Str)
for i in range(problemType["NumIndicesC"]):
kStr += ', ' if i else ''
kStr += 'id%d' % i
kStr += ");%s" % (self.endLine)
# D index
kStr += " %s idxC = GLOBAL_C( (%s)" % (self.uint64Str, self.uint64Str)
for i in range(problemType["NumIndicesC"]):
kStr += ', ' if i else ''
kStr += '0' if i in nonTileFreeIndices else ('id%d' % i)
kStr += ");%s" % (self.endLine)
########################################
# multi buffers GSU: Accumulate all GSU buffer
indexChar = self.indexChars[0]
kStr += " %s strideW = 1 + (size%s - 1) * strideW%s" % (self.uint64Str, indexChar, indexChar)
for i in range(1, problemType["NumIndicesC"]):
indexChar = self.indexChars[i]
kStr += " + (size%s - 1) * strideW%s" % (indexChar, indexChar)
kStr += ";" + self.endLine
kStr += " " + self.datatype + " accum = 0;%s" % self.endLine
kStr += " for (int i=0; i<gsu; i++) {%s" % self.endLine
kStr += " accum += W[idxW];%s" % self.endLine
kStr += " idxW += strideW;%s" % self.endLine
kStr += " }%s" % self.endLine
kStr += " if( beta == (%s)0)%s" % (self.state["ProblemType"]["ComputeDataType"].toDevice(self.language), self.endLine)
kStr += " accum = ((" + self.datatype + ")alpha) * accum;%s" % (self.endLine)
kStr += " else%s" % self.endLine
kStr += " accum = (((" + self.datatype + ")alpha) * accum + ((" + self.datatype + ")beta) * ((" + self.datatype + ")C[idxC]));" + self.endLine
typeStr = self.state["ProblemType"]["DestDataType"].toDevice(self.language)
kStr += " D[idxD] = (%s)accum;%s" % (typeStr, self.endLine)
########################################
# end
kStr += "}%s" % self.endLine
for i in range(firstStride, lastStrideC):
kStr += "#undef strideD" + self.indexChars[i] + self.endLine
for i in range(firstStride, lastStrideC):
kStr += "#undef strideW" + self.indexChars[i] + self.endLine
for i in range(firstStride, lastStrideC):
kStr += "#undef strideC" + self.indexChars[i] + self.endLine
kStr += "#undef GLOBAL_D%s" % (self.endLine)
kStr += "#undef GLOBAL_W%s" % (self.endLine)
kStr += "#undef GLOBAL_C%s" % (self.endLine)
return kStr
def getKernelName(self):
indexChars = globalParameters["IndexChars"]
# C dimensions
name = "C"
for i in range(0, self.state["ProblemType"]["NumIndicesC"]):
name += indexChars[i].lower()
name += "_"
name += self.state["ProblemType"]["DestDataType"].toChar()
name += "" if self.state["ProblemType"]["StridedBatched"] else "_GB"
name += "_PostGSU"
return name
def getHeaderFileString(self):
fileString = "" # CHeader
if not globalParameters["MergeFiles"]:
fileString += CHeader
fileString += "#pragma once\n\n"
fileString += "\n"
fileString += "#include <KernelHeader.h>\n\n"
fileString += "#include <hip/hip_runtime.h>\n"
fileString += "#include <hip/hip_fp16.h>\n"
fileString += "\n"
fileString += self.functionSignature()
fileString += ";\n"
return fileString
def getSourceFileString(self):
fileString = ""
if not globalParameters["MergeFiles"]:
fileString += "\n"
fileString += "#include \"%s.h\"\n" % self.kernelName
fileString += "\n"
fileString += self.functionSignature()
fileString += self.kernelBody()
return (0, fileString)
| 40.457792
| 149
| 0.598106
|
e6a7cb713f89d9766ce7027a60813d441286db11
| 492
|
py
|
Python
|
lib/model/session.py
|
dev1x-org/python-example
|
ad6b100d87d1ba8d5b77f160854dd3c387ded222
|
[
"MIT"
] | null | null | null |
lib/model/session.py
|
dev1x-org/python-example
|
ad6b100d87d1ba8d5b77f160854dd3c387ded222
|
[
"MIT"
] | null | null | null |
lib/model/session.py
|
dev1x-org/python-example
|
ad6b100d87d1ba8d5b77f160854dd3c387ded222
|
[
"MIT"
] | null | null | null |
#coding:utf-8
"""Model of Session
"""
class Session(object):
def __init__(self, **data):
self.data = data
def get_session_id(self):
return self.data["session_id"]
def get_user_id(self):
return self.data["user_id"]
def is_login(self):
if self.data["status"] == 0:
return False
else:
return True
def change_status(self):
pass
def to_string(self):
return self.data
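# Illustrative usage (not part of the original module).
if __name__ == '__main__':
    session = Session(session_id="abc123", user_id=42, status=1)
    print(session.get_user_id())  # 42
    print(session.is_login())     # True, because status is non-zero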
| 16.4
| 38
| 0.544715
|
ee07f5845d44bb36b4e5f76971dd3af855ed88fb
| 2,068
|
py
|
Python
|
hatsploit/payloads/linux/armle/shell_reverse_tcp.py
|
sunmughan/rat
|
650d4ca1a6d2405b893002150740b85008268263
|
[
"MIT"
] | null | null | null |
hatsploit/payloads/linux/armle/shell_reverse_tcp.py
|
sunmughan/rat
|
650d4ca1a6d2405b893002150740b85008268263
|
[
"MIT"
] | null | null | null |
hatsploit/payloads/linux/armle/shell_reverse_tcp.py
|
sunmughan/rat
|
650d4ca1a6d2405b893002150740b85008268263
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# This payload requires HatSploit: https://hatsploit.netlify.app
# Current source: https://github.com/EntySec/HatSploit
#
from hatvenom import HatVenom
from hatsploit.lib.payload import Payload
from hatsploit.utils.tcp import TCPClient
class HatSploitPayload(Payload, HatVenom):
details = {
'Category': "stager",
'Name': "Linux armle Shell Reverse TCP",
'Payload': "linux/armle/shell_reverse_tcp",
'Authors': [
'Ivan Nikolsky (enty8080) - payload developer'
],
'Description': "Shell reverse TCP payload for Linux armle.",
'Comments': [
''
],
'Architecture': "armle",
'Platform': "linux",
'Risk': "high",
'Type': "reverse_tcp"
}
options = {
'CBHOST': {
'Description': "Connect-back host.",
'Value': TCPClient.get_local_host(),
'Type': "ip",
'Required': True
},
'CBPORT': {
'Description': "Connect-back port.",
'Value': 8888,
'Type': "port",
'Required': True
}
}
def run(self):
connback_host, connback_port = self.parse_options(self.options)
offsets = {
'cbhost': connback_host,
'cbport': connback_port
}
shellcode = (
b"\x01\x10\x8F\xE2"
b"\x11\xFF\x2F\xE1"
b"\x02\x20\x01\x21"
b"\x92\x1A\x0F\x02"
b"\x19\x37\x01\xDF"
b"\x06\x1C\x08\xA1"
b"\x10\x22\x02\x37"
b"\x01\xDF\x3F\x27"
b"\x02\x21\x30\x1c"
b"\x01\xdf\x01\x39"
b"\xFB\xD5\x05\xA0"
b"\x92\x1a\x05\xb4"
b"\x69\x46\x0b\x27"
b"\x01\xDF\xC0\x46"
b"\x02\x00"
b":cbport:port:"
b":cbhost:ip:"
b"\x2f\x62\x69\x6e"
b"\x2f\x73\x68\x00"
)
payload = self.generate('elf', 'armle', shellcode, offsets)
return payload
| 26.512821
| 71
| 0.5
|
6d90ab6f086134e13b7330cde81d66e16e693cfe
| 2,692
|
py
|
Python
|
python/plugins/processing/modeler/MultilineTextPanel.py
|
dyna-mis/Hilabeling
|
cb7d5d4be29624a20c8a367162dbc6fd779b2b52
|
[
"MIT"
] | null | null | null |
python/plugins/processing/modeler/MultilineTextPanel.py
|
dyna-mis/Hilabeling
|
cb7d5d4be29624a20c8a367162dbc6fd779b2b52
|
[
"MIT"
] | null | null | null |
python/plugins/processing/modeler/MultilineTextPanel.py
|
dyna-mis/Hilabeling
|
cb7d5d4be29624a20c8a367162dbc6fd779b2b52
|
[
"MIT"
] | 1
|
2021-12-25T08:40:30.000Z
|
2021-12-25T08:40:30.000Z
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
MultilineTextPanel.py
---------------------
Date : January 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '176c06ceefb5f555205e72b20c962740cc0ec183'
from qgis.PyQt.QtWidgets import QComboBox, QPlainTextEdit, QSizePolicy, QVBoxLayout, QWidget
class MultilineTextPanel(QWidget):
USE_TEXT = 0
def __init__(self, options, parent=None):
super(MultilineTextPanel, self).__init__(parent)
self.options = options
self.verticalLayout = QVBoxLayout(self)
self.verticalLayout.setSpacing(2)
self.verticalLayout.setMargin(0)
self.combo = QComboBox()
self.combo.addItem(self.tr('[Use text below]'))
for option in options:
self.combo.addItem(option[0], option[1])
self.combo.setSizePolicy(QSizePolicy.Expanding,
QSizePolicy.Expanding)
self.verticalLayout.addWidget(self.combo)
self.textBox = QPlainTextEdit()
self.verticalLayout.addWidget(self.textBox)
self.setLayout(self.verticalLayout)
def setText(self, text):
self.textBox.setPlainText(text)
def getOption(self):
return self.combo.currentIndex()
def getValue(self):
if self.combo.currentIndex() == 0:
return str(self.textBox.toPlainText())
else:
return self.combo.currentData()
def setValue(self, value):
items = [self.combo.itemData(i) for i in range(1, self.combo.count())]
        for idx, item in enumerate(items):
            if item == value:
                # items starts at combo index 1, so shift the enumerate index by one
                self.combo.setCurrentIndex(idx + 1)
return
self.combo.setCurrentIndex(0)
if value:
self.textBox.setPlainText(value)
| 36.876712
| 92
| 0.534547
|
1eb587e8989345523b6869acdb18a6c60f6a3a32
| 4,097
|
py
|
Python
|
content/_code-samples/tx-serialization/base58/base58.py
|
fossabot/xrpl-dev-portal
|
c189df941fd6808c0ba8af9a28cdab418ff213a0
|
[
"Apache-2.0"
] | 107
|
2019-09-18T06:42:59.000Z
|
2021-07-19T09:24:26.000Z
|
content/_code-samples/tx-serialization/base58/base58.py
|
fossabot/xrpl-dev-portal
|
c189df941fd6808c0ba8af9a28cdab418ff213a0
|
[
"Apache-2.0"
] | 301
|
2019-08-15T22:14:54.000Z
|
2021-07-20T21:59:28.000Z
|
content/_code-samples/tx-serialization/base58/base58.py
|
fossabot/xrpl-dev-portal
|
c189df941fd6808c0ba8af9a28cdab418ff213a0
|
[
"Apache-2.0"
] | 442
|
2019-08-16T09:56:28.000Z
|
2021-07-22T07:47:03.000Z
|
'''Base58 encoding
Implementations of Base58 and Base58Check encodings that are compatible
with the XRP Ledger.
'''
# This code is adapted from the module by David Keijser at
# <https://github.com/keis/base58>. - rome@ripple.com
# His notes are preserved below:
# This module is based upon base58 snippets found scattered over many bitcoin
# tools written in python. From what I gather the original source is from a
# forum post by Gavin Andresen, so direct your praise to him.
# This module adds shiny packaging and support for python3.
from hashlib import sha256
__version__ = '1.0.3-xrp'
# 58 character alphabet used
# alphabet = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' # Bitcoin
alphabet = b'rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz' # XRP Ledger
if bytes == str: # python2
iseq, bseq, buffer = (
lambda s: map(ord, s),
lambda s: ''.join(map(chr, s)),
lambda s: s,
)
else: # python3
iseq, bseq, buffer = (
lambda s: s,
bytes,
lambda s: s.buffer,
)
def scrub_input(v):
if isinstance(v, str) and not isinstance(v, bytes):
v = v.encode('ascii')
if not isinstance(v, bytes):
raise TypeError(
"a bytes-like object is required (also str), not '%s'" %
type(v).__name__)
return v
def b58encode_int(i, default_one=True):
'''Encode an integer using Base58'''
if not i and default_one:
return alphabet[0:1]
string = b""
while i:
i, idx = divmod(i, 58)
string = alphabet[idx:idx+1] + string
return string
def b58encode(v):
'''Encode a string using Base58'''
v = scrub_input(v)
nPad = len(v)
v = v.lstrip(b'\0')
nPad -= len(v)
p, acc = 1, 0
for c in iseq(reversed(v)):
acc += p * c
p = p << 8
result = b58encode_int(acc, default_one=False)
return (alphabet[0:1] * nPad + result)
def b58decode_int(v):
'''Decode a Base58 encoded string as an integer'''
v = scrub_input(v)
decimal = 0
for char in v:
decimal = decimal * 58 + alphabet.index(char)
return decimal
def b58decode(v):
'''Decode a Base58 encoded string'''
v = scrub_input(v)
origlen = len(v)
v = v.lstrip(alphabet[0:1])
newlen = len(v)
acc = b58decode_int(v)
result = []
while acc > 0:
acc, mod = divmod(acc, 256)
result.append(mod)
return (b'\0' * (origlen - newlen) + bseq(reversed(result)))
def b58encode_check(v):
'''Encode a string using Base58 with a 4 character checksum'''
digest = sha256(sha256(v).digest()).digest()
return b58encode(v + digest[:4])
def b58decode_check(v):
'''Decode and verify the checksum of a Base58 encoded string'''
result = b58decode(v)
result, check = result[:-4], result[-4:]
digest = sha256(sha256(result).digest()).digest()
if check != digest[:4]:
raise ValueError("Invalid checksum")
return result
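# Illustrative round trip using the XRP Ledger alphabet defined above (not part of the original
# module). An XRP classic address is the Base58Check encoding of a one-byte 0x00 type prefix plus
# a 20-byte account ID; the payload below is made up for the example.
_example_payload = b'\x00' + b'\x01' * 20
_example_encoded = b58encode_check(_example_payload)
assert b58decode_check(_example_encoded) == _example_payload  # checksum verified on decode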
def main():
'''Base58 encode or decode FILE, or standard input, to standard output.'''
import sys
import argparse
stdout = buffer(sys.stdout)
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument(
'file',
metavar='FILE',
nargs='?',
type=argparse.FileType('r'),
default='-')
parser.add_argument(
'-d', '--decode',
action='store_true',
help='decode data')
parser.add_argument(
'-c', '--check',
action='store_true',
help='append a checksum before encoding')
args = parser.parse_args()
fun = {
(False, False): b58encode,
(False, True): b58encode_check,
(True, False): b58decode,
(True, True): b58decode_check
}[(args.decode, args.check)]
data = buffer(args.file).read()
try:
result = fun(data)
except Exception as e:
sys.exit(e)
if not isinstance(result, bytes):
result = result.encode('ascii')
stdout.write(result)
if __name__ == '__main__':
main()
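# Illustrative command-line usage (not part of the original module); by default
# the script encodes standard input, and -d decodes:
#
#     echo -n 'hello' | python base58.py
#     echo -n 'hello' | python base58.py | python base58.py -d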
| 23.016854
| 85
| 0.61484
|
b1f602025f04cb22e0612400c674791028360a9e
| 2,601
|
py
|
Python
|
profit/util/io.py
|
krystophny/profit
|
c6316c9df7cfaa7b30332fdbbf85ad27175eaf92
|
[
"MIT"
] | 14
|
2019-12-03T14:11:28.000Z
|
2022-03-15T13:44:06.000Z
|
profit/util/io.py
|
krystophny/profit
|
c6316c9df7cfaa7b30332fdbbf85ad27175eaf92
|
[
"MIT"
] | 118
|
2019-11-16T19:51:26.000Z
|
2022-03-26T13:52:00.000Z
|
profit/util/io.py
|
krystophny/profit
|
c6316c9df7cfaa7b30332fdbbf85ad27175eaf92
|
[
"MIT"
] | 9
|
2020-06-08T07:22:56.000Z
|
2021-03-21T14:12:21.000Z
|
from os import path, chdir, listdir
from .util import save, load
def read_input(filename):
""" Loads data from input file into a numpy array. """
data = load(filename)
return data.view((float, len(data.dtype.names))).T
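# Illustrative note (not part of the original module): read_input returns the
# data transposed, one row per input variable. For a structured file with
# fields 'u' and 'v' (hypothetical names):
#
#     data = read_input('input.txt')
#     u_values, v_values = data[0], data[1]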
def collect_output(config, default_interface=False):
""" Collects simulation results from each run directory into a single output file. """
from numpy import zeros, nan
from importlib.util import spec_from_file_location, module_from_spec
from tqdm import tqdm
if not default_interface:
try:
spec = spec_from_file_location('interface', config['interface'])
interface = module_from_spec(spec)
spec.loader.exec_module(interface)
except FileNotFoundError:
raise ImportError("Could not load interface {}".format(config['interface']))
else:
try:
name = [f for f in listdir(path.join(config['run_dir'], '000')) if f.endswith('out')][0]
except IndexError:
name = None
interface = DefaultInterface(name)
# TODO: do this in less code?
# Header for output
header = []
for out, values in config['output'].items():
if not values['range']:
header.append("{f}".format(f=out))
else:
for dependent, rng in values['range'].items():
for number in rng.flatten():
header.append("{f}({x}={n})".format(f=out, x=dependent, n=round(number, 2)))
# Get vector output
nout = 0
for v in config['output'].values():
for rng in v['range'].values():
nout += rng.size
dtypes = [(key, float) for key in config['output'].keys()]
data = zeros((config['ntrain'], max(int(nout), 1)), dtype=dtypes)
kruns = tqdm(range(config['ntrain']))
for krun in kruns:
# .zfill(3) is an option that forces krun to have 3 digits
run_dir_single = path.join(config['run_dir'], str(krun).zfill(3))
print(run_dir_single)
try:
chdir(run_dir_single)
# TODO: make get_output run with parameters e.g. config['interface']['params'] as *args
# Interface should return a tuple or list if more than one output variable.
alldata = interface.get_output()
for i, key in enumerate(data.dtype.names):
data[key][krun, :] = alldata[i] if isinstance(alldata, (tuple, list)) else alldata
except:
data[krun, :] = nan
finally:
chdir(config['run_dir'])
save(config['files']['output'], data, ' '.join(header))
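# Illustrative sketch (not part of the original module): the custom interface
# loaded above via spec_from_file_location only needs to expose get_output().
# The file name and output file below are hypothetical.
#
#     # interface.py
#     import numpy as np
#     def get_output():
#         return np.loadtxt('result.out')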
| 37.157143
| 100
| 0.603614
|
6be72afc9294a266696ea4fdf256109074de1601
| 66
|
py
|
Python
|
src/routers/api.py
|
vcokltfre/bald-eagles
|
dac5420b403176d593fe0fee1b522e038b6902db
|
[
"MIT"
] | 1
|
2021-07-06T18:16:48.000Z
|
2021-07-06T18:16:48.000Z
|
src/routers/api.py
|
vcokltfre/bald-eagles
|
dac5420b403176d593fe0fee1b522e038b6902db
|
[
"MIT"
] | 1
|
2021-07-06T18:27:24.000Z
|
2021-07-06T18:27:24.000Z
|
src/routers/api.py
|
vcokltfre/bald-eagles
|
dac5420b403176d593fe0fee1b522e038b6902db
|
[
"MIT"
] | null | null | null |
from fastapi import APIRouter
router = APIRouter(prefix="/api")
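# Illustrative note (not part of the original module): this router would
# typically be mounted on the application elsewhere, e.g.
#
#     from fastapi import FastAPI
#     app = FastAPI()
#     app.include_router(router)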
| 13.2
| 33
| 0.757576
|
48bd05eb6c1e80d0c8e8cadbf43c0f923012f3a6
| 149
|
py
|
Python
|
calc media.py
|
paulokpv/codigos-python
|
78d41a3e07e68139c42b6bdf756d5d3f2d593ba4
|
[
"MIT"
] | null | null | null |
calc media.py
|
paulokpv/codigos-python
|
78d41a3e07e68139c42b6bdf756d5d3f2d593ba4
|
[
"MIT"
] | null | null | null |
calc media.py
|
paulokpv/codigos-python
|
78d41a3e07e68139c42b6bdf756d5d3f2d593ba4
|
[
"MIT"
] | null | null | null |
a = float(input('enter a grade: '))
b = float(input('enter another grade: '))
media = (a+b)/2
print('the average of {:.1f} and {:.1f} is {:.1f}'.format(a, b, media))
| 37.25
| 64
| 0.61745
|
6dcda9d4f5a7a6f98c7822367a35f08fdf1e602c
| 393
|
py
|
Python
|
CursoemVideo/challenge002.py
|
ElptsJunior/Python
|
7347b38947b439afa392764aafe0a55f808530dd
|
[
"MIT"
] | null | null | null |
CursoemVideo/challenge002.py
|
ElptsJunior/Python
|
7347b38947b439afa392764aafe0a55f808530dd
|
[
"MIT"
] | null | null | null |
CursoemVideo/challenge002.py
|
ElptsJunior/Python
|
7347b38947b439afa392764aafe0a55f808530dd
|
[
"MIT"
] | null | null | null |
print('\033[32m = \033[m'*27)
print(" BUILD AN PYTHON SCRIPT THAT READ'S DAY,MONTH AND YEAR AND RETURN THE VALUES ".title())
print('\033[32m = \033[m'*27)
year = int(input('Please insert the year - yyyy :'))
month = int(input('Now insert the month - mm :'))
day = int(input('insert your day - dd : '))
print(" the date inserted \033[7m{} / {} / {}\033[m isn't it? ".format(day, month, year))
| 39.3
| 95
| 0.633588
|
07f861f84d1e3fbf3c02b9712052a893fb54c602
| 14,905
|
py
|
Python
|
simple_applications/pytorch/popart_api/mnist/pytorch_popart_mnist.py
|
Paperspace/tutorials
|
8e20ffb687080c44e75dabea594d2b57acc53713
|
[
"MIT"
] | null | null | null |
simple_applications/pytorch/popart_api/mnist/pytorch_popart_mnist.py
|
Paperspace/tutorials
|
8e20ffb687080c44e75dabea594d2b57acc53713
|
[
"MIT"
] | null | null | null |
simple_applications/pytorch/popart_api/mnist/pytorch_popart_mnist.py
|
Paperspace/tutorials
|
8e20ffb687080c44e75dabea594d2b57acc53713
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2019 Graphcore Ltd. All rights reserved.
"""
A simple program that uses PyTorch to create a linear model and then
trains it on the MNIST data set using the popart library.
"""
import argparse
import numpy as np
import popart
import torch
import torch.nn as nn
import torch.nn.functional as func
from torchvision import datasets, transforms
from typing import Tuple
from collections import namedtuple
from time import time
import tempfile
# The following is a workaround for pytorch issue #1938
from six.moves import urllib
opener = urllib.request.build_opener()
opener.addheaders = [("User-agent", "Mozilla/5.0")]
urllib.request.install_opener(opener)
# Constants for the MNIST dataset
IMAGE_WIDTH = 28
IMAGE_HEIGHT = 28
NUM_CLASSES = 10
NUM_TEST_SAMPLES = 10000
# Constants for IPU emulator
TILES_PER_IPU = 1216
class Net(nn.Module):
"""Neural network module that defines the simple linear model to
classify MNIST digits.
Attributes:
fc: Fully connected layer between input and output.
"""
def __init__(self) -> None:
"""Initialize.
"""
super(Net, self).__init__()
self.fc = nn.Linear(IMAGE_WIDTH * IMAGE_HEIGHT, NUM_CLASSES)
def forward(self, x: torch.Tensor) -> torch.Tensor:
"""Define the forward pass.
Args:
x : Image input tensor.
Returns:
Softmax output probabilities per class.
"""
x = self.fc(x)
return func.softmax(x, dim=1)
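# Illustrative sketch (not part of the original script): a quick shape check of
# the model above on random data, before it is exported to ONNX.
#
#     net = Net()
#     probs = net(torch.randn(4, IMAGE_WIDTH * IMAGE_HEIGHT))
#     assert probs.shape == (4, NUM_CLASSES)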
def create_model(
batch_size: int, temp_file: tempfile.NamedTemporaryFile
) -> Tuple[str, str]:
"""Create Pytorch model and export as an ONNX protobuf.
Args:
batch_size : Batch size of the model.
temp_file : To hold the model
Returns:
image_input name, output_name
"""
net = Net()
image_input = "input_1"
output = "output_1"
input_names = [image_input] + [
"learned_%d" % i for i, _ in enumerate(net.parameters())
]
dummy_input = torch.randn(batch_size, IMAGE_WIDTH * IMAGE_HEIGHT)
torch.onnx.export(
net,
dummy_input,
temp_file.name,
input_names=input_names,
output_names=[output],
)
return image_input, output
def convert_model(
batch_size: int, protobuf_file: str, output_name: str
) -> Tuple[bytes, str, str]:
"""Create popart builder and loss for model.
Args:
batch_size : Batch size per inference.
protobuf_file : ONNX binary protobuf filename.
output_name: Name of the output Tensor using which loss must be computed
Returns:
Modelproto, label and loss.
"""
# Create builder from onnx protobuf file
builder = popart.Builder(protobuf_file)
# Set up label Tensor
label_shape = popart.TensorInfo("INT32", [batch_size])
label = builder.addInputTensor(label_shape)
# Add loss
loss = builder.aiGraphcore.nllloss([output_name, label], popart.ReductionType.Sum, debugContext="nllLossVal")
proto = builder.getModelProto()
return proto, label, loss
def get_data_loader(
cl_opts: argparse.Namespace, is_train: bool
) -> torch.utils.data.DataLoader:
"""Get dataloader for training/testing.
Args:
cl_opts: The command line arguments
is_train: Flag is True if training.
Returns:
Dataloader for the split requested.
"""
if cl_opts.syn_data_type in ["random_normal", "zeros"]:
print(
"Loading FAKE data {}".format(
"for training" if is_train else "for inference"
)
)
data_set = datasets.FakeData(
size=cl_opts.batch_size * cl_opts.batches_per_step,
image_size=(1, 28, 28),
num_classes=NUM_CLASSES,
transform=transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0,), (1,))]
),
)
else:
print(
"Loading MNIST data {}".format(
"for training" if is_train else "for inference"
)
)
data_set = datasets.MNIST(
"~/.torch/datasets",
train=is_train,
download=True,
transform=transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0,), (1,))]
),
)
return torch.utils.data.DataLoader(
data_set,
batch_size=cl_opts.batch_size * cl_opts.batches_per_step,
shuffle=is_train,
)
def preprocess_data(
data: torch.Tensor, label: torch.Tensor
) -> Tuple[np.ndarray, np.ndarray]:
"""Preprocess data from data loader.
Args:
data: image input
label: corresponding output
Returns: pre-processed data and label in numpy format.
"""
data, label = data.numpy(), label.numpy()
data = data.reshape(opts.batches_per_step, opts.batch_size, -1)
label = label.reshape(opts.batches_per_step, opts.batch_size)
label = label.astype(np.int32)
return data, label
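# Illustrative note (not part of the original script): after preprocess_data the
# arrays match what popart.PyStepIO expects below, i.e. data has shape
# (batches_per_step, batch_size, 784) and label has shape
# (batches_per_step, batch_size) with int32 entries.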
def train(opts, model_file, ckpt_file) -> None:
"""
Train MNIST model using command line args.
Args:
opts: The command line options
model_file: Temporary file for holding the model
ckpt_file: Temporary file for holding the weights
"""
if not opts.test_mode:
max_value = NUM_TEST_SAMPLES // opts.batch_size
if max_value < opts.batches_per_step:
print(
"(batches-per-step * batch-size) is larger than test set!\n"
" Reduced batches-per-step to: {}\n".format(max_value)
)
opts.batches_per_step = max_value
# Construct MNIST data loaders
train_loader = get_data_loader(opts, is_train=True)
test_loader = get_data_loader(opts, is_train=False)
print("Creating ONNX model.")
data_in, output = create_model(opts.batch_size, model_file)
print("Converting model.")
proto, label_in, loss = convert_model(
opts.batch_size, model_file.name, output
)
# Describe how to run the model
anchor_desc = {
output: popart.AnchorReturnType("ALL"),
loss: popart.AnchorReturnType("ALL"),
}
dataFlow = popart.DataFlow(opts.batches_per_step, anchor_desc)
optimizer = popart.ConstSGD(0.01)
# Options
userOpts = popart.SessionOptions()
# Ensure weight tensors in the validation model are not modified by the IR
userOpts.constantWeights = False
# If requested, setup synthetic data
if opts.syn_data_type in ["random_normal", "zeros"]:
print(
"Running with Synthetic Data Type '{}'".format(opts.syn_data_type)
)
if opts.syn_data_type == "random_normal":
userOpts.syntheticDataMode = popart.SyntheticDataMode.RandomNormal
elif opts.syn_data_type == "zeros":
userOpts.syntheticDataMode = popart.SyntheticDataMode.Zeros
# Select a device
deviceManager = popart.DeviceManager()
if opts.simulation:
print("Running using IPU MODEL")
options = {
"compileIPUCode": True,
"numIPUs": 1,
"tilesPerIPU": TILES_PER_IPU,
}
device = deviceManager.createIpuModelDevice(options)
else:
print("Running using Hardware")
device = deviceManager.acquireAvailableDevice()
if device is None:
print("Failed to acquire IPU. Exiting.")
return
if opts.test_mode:
print(" IPU IDs: {}".format(device.driverIds))
def init_session(proto, loss, dataFlow, userOpts, device, training, opts):
# Create a session to compile and execute the graph
if opts.test_mode:
userOpts.instrumentWithHardwareCycleCounter = True
if training:
session = popart.TrainingSession(
fnModel=proto,
loss=loss,
optimizer=optimizer,
dataFlow=dataFlow,
userOptions=userOpts,
deviceInfo=device,
)
else:
session = popart.InferenceSession(
fnModel=proto,
dataFlow=dataFlow,
userOptions=userOpts,
deviceInfo=device,
)
print(
"Compiling the {} graph.".format(
"training" if training else "validation"
)
)
session.prepareDevice()
# Create buffers to receive results from the execution
anchors = session.initAnchorArrays()
Session = namedtuple("Session", ["session", "anchors"])
return Session(session, anchors)
training = init_session(proto, loss, dataFlow, userOpts, device, True, opts)
validation = init_session(
proto, loss, dataFlow, userOpts, device, False, opts
)
inputs_per_step = opts.batch_size * opts.batches_per_step
for i in range(opts.epochs):
# Training
if i > 0:
training.session.resetHostWeights(ckpt_file.name)
training.session.weightsFromHost()
for data, label in train_loader:
if len(label) != inputs_per_step:
continue
data, label = preprocess_data(data, label)
stepio = popart.PyStepIO(
{data_in: data, label_in: label}, training.anchors
)
if opts.test_mode == "training":
start = time()
training.session.run(stepio)
if opts.test_mode == "training":
duration = time() - start
report_string = "{:<8.3} sec/itr.".format(duration)
report_string += " " + iteration_report(opts, duration)
print(report_string)
print(
"Hardware cycle count per 'run':",
training.session.getCycleCount(),
)
print("Total time: {}".format(duration))
training.session.modelToHost(ckpt_file.name)
if not opts.validation_final_epoch or i == opts.epochs - 1:
# Evaluation
aggregated_loss = 0
num_correct = 0
validation.session.resetHostWeights(ckpt_file.name)
validation.session.weightsFromHost()
for data, label in test_loader:
if len(label) != inputs_per_step:
continue
data, label = preprocess_data(data, label)
stepio = popart.PyStepIO(
{data_in: data, label_in: label}, validation.anchors
)
if opts.test_mode == "inference":
start = time()
validation.session.run(stepio)
if opts.test_mode == "inference":
duration = time() - start
report_string = "{:<8.3} sec/itr.".format(duration)
report_string += " " + iteration_report(opts, duration)
print(report_string)
print(
"Hardware cycle count per 'run':",
validation.session.getCycleCount(),
)
print("Total time: {}".format(duration))
aggregated_loss += np.mean(validation.anchors[loss])
results = np.argmax(
validation.anchors[output].reshape(
[inputs_per_step, NUM_CLASSES]
),
1,
)
score = results == label.reshape([inputs_per_step])
num_correct += np.sum(score)
aggregated_loss /= len(test_loader)
accuracy = num_correct / len(test_loader.dataset)
# Log statistics
print("Epoch #{}".format(i))
print(" Loss={0:.4f}".format(aggregated_loss))
print(" Accuracy={0:.2f}%".format(accuracy * 100))
def iteration_report(opts, time):
return "{:5f} images/sec.".format(
opts.batch_size * opts.batches_per_step / time
)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="MNIST training in PyTorch with popart backend.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"--batch-size", type=int, default=32, help="Set the Batch size"
)
parser.add_argument(
"--batches-per-step",
type=int,
default=100,
help="Number of minibatches to perform on the Device before returning t"
"o the Host. This will be capped so the Device returns each epoch.",
)
parser.add_argument(
"--epochs", type=int, default=10, help="Number of epochs to train for."
)
parser.add_argument(
"--simulation",
action="store_true",
help="Run the example with an IPU_MODEL device.",
)
parser.add_argument(
"--log-graph-trace",
action="store_true",
help="Turn on ir logging to display the graph's ops.",
)
parser.add_argument(
"--test-mode",
type=str,
help="Output extra performance information, specify wit"
"h either 'training' or 'inference'",
)
parser.add_argument(
"--syn-data-type",
type=str,
default="off",
help="Specify to use synthetic data with either 'random"
"_normal' or 'zeros'",
)
parser.add_argument(
"--validation-final-epoch",
action='store_true',
help="Only run validation after the final epoch.",
)
opts = parser.parse_args()
# Validate synthetic data argument given
if opts.syn_data_type:
valids = ["random_normal", "zeros", "off"]
if opts.syn_data_type not in valids:
raise ValueError(
"'--syn-data-type' must be one of {}".format(valids)
)
# Validate test mode given
if opts.test_mode:
valids = ["training", "inference"]
if opts.test_mode not in valids:
raise ValueError("'--test-mode' must be one of {}".format(valids))
# Validate the given batch size and batches per step
total = opts.batch_size * opts.batches_per_step
if NUM_TEST_SAMPLES < total or total < 1:
raise ValueError(
"'--batch-size' ({}) multiplied by '--batches-per-step"
"' ({}) comes to {} which is not in the range of avail"
"able images ({})".format(
opts.batch_size, opts.batches_per_step, total, NUM_TEST_SAMPLES
)
)
# Set logging
popart.getLogger("ir").setLevel(
"TRACE" if opts.log_graph_trace else "CRITICAL"
)
popart.getLogger("devicex").setLevel("CRITICAL")
with tempfile.NamedTemporaryFile() as model_file:
with tempfile.NamedTemporaryFile() as ckpt_file:
train(opts, model_file, ckpt_file)
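# Illustrative usage (not part of the original script), using the command-line
# flags defined above:
#
#     python pytorch_popart_mnist.py --batch-size 32 --epochs 10 --simulation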
| 32.261905
| 113
| 0.597451
|
0a545bb5308fb60221d28269183e4f8388ca4690
| 1,277
|
py
|
Python
|
CloudBackup/mail.py
|
520github/CloudBackup
|
ec4a48f1ba438dbaf45d518c5ae0f192b6e7aa96
|
[
"Apache-2.0"
] | 9
|
2015-08-23T09:08:14.000Z
|
2019-04-29T02:08:11.000Z
|
CloudBackup/mail.py
|
chineking/CloudBackup
|
ec4a48f1ba438dbaf45d518c5ae0f192b6e7aa96
|
[
"Apache-2.0"
] | null | null | null |
CloudBackup/mail.py
|
chineking/CloudBackup
|
ec4a48f1ba438dbaf45d518c5ae0f192b6e7aa96
|
[
"Apache-2.0"
] | 5
|
2016-07-19T03:38:10.000Z
|
2017-12-06T21:13:42.000Z
|
#!/usr/bin/env python
#coding=utf-8
'''
Copyright (c) 2012 chine <qin@qinxuye.me>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Created on 2012-5-22
@author: Chine
'''
import smtplib
from email.mime.text import MIMEText
from CloudBackup.test.settings import EMAIL_HOST, EMAIL_HOST_PASSWORD, EMAIL_HOST_USER
def send_mail(to_list, subject, content):
msg = MIMEText(content, _charset='utf-8')
msg['Subject'] = subject
msg['From'] = EMAIL_HOST_USER
msg['To'] = ';'.join(to_list)
try:
s = smtplib.SMTP()
s.connect(EMAIL_HOST)
s.login(EMAIL_HOST_USER, EMAIL_HOST_PASSWORD)
s.sendmail(EMAIL_HOST_USER , to_list, msg.as_string())
s.close()
return True
    except Exception as e:
        print(str(e))
return False
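# Illustrative usage (not part of the original module); the address and text
# are made up for demonstration:
#
#     send_mail(['user@example.com'], 'backup finished', 'all files uploaded')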
| 29.022727
| 86
| 0.709475
|
dca8636a2ad03cb355cabd35c1e06b323891dde2
| 7,786
|
py
|
Python
|
experiments/main.py
|
vishalbelsare/generalised-signature-method
|
78c1a8bf7f5c31c61b32d18732f58da916db3855
|
[
"MIT"
] | 29
|
2020-06-02T12:07:40.000Z
|
2022-03-15T05:30:57.000Z
|
experiments/main.py
|
vishalbelsare/generalised-signature-method
|
78c1a8bf7f5c31c61b32d18732f58da916db3855
|
[
"MIT"
] | 2
|
2021-06-25T04:53:25.000Z
|
2021-08-24T12:17:47.000Z
|
experiments/main.py
|
vishalbelsare/generalised-signature-method
|
78c1a8bf7f5c31c61b32d18732f58da916db3855
|
[
"MIT"
] | 5
|
2020-06-05T23:12:34.000Z
|
2021-06-08T11:15:54.000Z
|
"""
main.py
=========================
The main experiment run file.
"""
from definitions import *
from sacred import Experiment
import argparse
from experiments.dicts.configurations import configs
from experiments.dicts.data_dicts import datasets_dict
from experiments.ingredients.prepare.checkers import check_sklearn, check_learnt, check_meta
from experiments.ingredients.prepare.prepare_data import get_data, preprocess, compute_input_size
from experiments.utils import create_fso, basic_gridsearch, handle_error, set_completion_state
from experiments.ingredients.train import train_models
from experiments.ingredients.evaluate import evaluate_models
# For running in parallel
parser = argparse.ArgumentParser()
parser.add_argument('-e', '--ex_name', help='The experiment name (determines the save folder).', default='copy_conf')
parser.add_argument('-c', '--config', help='The configuration entry key.', default=['test'], nargs='+')
parser.add_argument('-ds', '--datasets', help='The name of the datasets to run.', default=['ERing'], nargs='+')
parser.add_argument('-r', '--resume', help='Resume runs in a folder that already has saves.', action='store_true')
parser.add_argument('-j', '--jobs', help='Set True to parallelise the runs over the datasets.', default=1, type=int)
parser.add_argument('-s', '--save', help='Set True to save the model into a dill file.', action='store_true')
args = parser.parse_args()
# Handle the dataset arg
if len(args.datasets) == 1:
if args.datasets[0] in datasets_dict.keys():
args.datasets = datasets_dict[args.datasets[0]]
# Parallelise over the datasets if specified
if args.jobs > 1:
datasets = ' '.join(args.datasets)
config_strs = ' '.join(args.config)
resume = '-r' if args.resume else ''
save = '-s' if args.save else ''
command = 'parallel -j {} --bar python main.py -c {{1}} -ds {{2}} {} {} ::: {} ::: {}' \
''.format(args.jobs, resume, save, config_strs, datasets)
print('Running command: {}'.format(command))
os.system(command)
exit()
else:
assert len(args.config) == 1, "Cannot have multiple configs when not in parallel mode. Set the -j flag to be > 1."
# Set the experiment and save folder
args.config = args.config[0]
ex_name = args.ex_name if args.ex_name != 'copy_conf' else args.config
save_dir = RESULTS_DIR + '/' + ex_name
ex = Experiment(ex_name)
# If the directory exists, proceed only when the user has confirmed they are aware of this
if os.path.exists(save_dir):
if not args.resume:
raise Exception("Runs already exist at: {}. \nPass the resume (-r) flag to confirm you are aware of this and "
"wish to proceed. \nElse delete the folder or change (-e) to a folder that doesn't yet exist."
.format(save_dir))
# Default configuration
@ex.config
def my_config():
verbose = 2 # Verbosity level
gpu = True # Enable GPU
sanity_dim = 1e5 # Max number of features
ds_name = 'AtrialFibrillation' # Dataset to load
train_test_split = 'original' # How to create train/test set
scaling = 'stdsc' # Feature scaling
tfms = ['addtime'] # Basic augmentations
rescaling = 'pre' # Signature rescaling
disintegrations = None # Disintegrate into paths of size k
num_augments = None # Number of augmentations
augment_out = None # Number of output channels for each augmentation
num_projections = None # Number of projections
projection_channels = None # Number of channels for each projection
normalisation = None # Normalisation type
window = ('Global', {}) # Windowing type and arguments
sig_tfm = 'signature' # Signature transform
depth = 3 # Signature depth
clf = 'rf' # Classifier
grid_search = False # Whether to gridsearch over the parameters
save_best_model = False # Saves the best model as a .dill file.
# Main run file
@ex.main
def main(_run,
ds_name,
train_test_split,
verbose,
gpu,
sanity_dim,
scaling,
tfms,
clf,
grid_search,
rescaling,
disintegrations,
num_augments,
augment_out,
num_projections,
projection_channels,
window,
depth,
sig_tfm,
normalisation,
save_best_model
):
try:
# if True:
# Add in save_dir
_run.save_dir = '{}/{}'.format(save_dir, _run._id)
ds_train, ds_test = get_data(ds_name, train_test_split)
# Apply tfms here so they are not computed multiple times
path_tfms, in_channels = preprocess(ds_train, scaling, tfms)
# Open out some params
ds_length, ds_dim, n_classes = ds_train.size(1), ds_train.size(2), ds_train.n_classes
window_name, window_kwargs = window
# Get in_channels with sanity check
in_channels_clf, signature_channels = compute_input_size(
in_channels, ds_length, window_name, window_kwargs, clf, disintegrations, augment_out, num_augments,
num_projections, projection_channels, sig_tfm, depth, sanity_dim=sanity_dim
)
# Store some useful info to the saved metrics.
_run.log_scalar('ds_length', ds_length)
_run.log_scalar('ds_dim', ds_dim)
_run.log_scalar('n_classes', n_classes)
_run.log_scalar('n_train_samples', ds_train.size(0))
_run.log_scalar('n_test_samples', ds_test.size(0))
_run.log_scalar('in_channels_clf', in_channels_clf)
# Perform checks to inform algorithm building
is_learnt = check_learnt(num_augments, augment_out, normalisation)
is_sklearn = check_sklearn(clf)
is_meta = check_meta(window_name, clf)
# Args used to build the signature model
model_args = {
'in_channels': in_channels,
'signature_channels': signature_channels,
'out_channels': n_classes if n_classes > 2 else 1,
'ds_length': ds_length,
'disintegrations': disintegrations,
'num_augments': num_augments,
'augment_out': augment_out,
'num_projections': num_projections,
'projection_channels': projection_channels,
'window_name': window_name,
'window_kwargs': window_kwargs,
'sig_tfm': sig_tfm,
'depth': depth,
'rescaling': rescaling,
'normalisation': normalisation,
'clf': clf,
'in_channels_clf': in_channels_clf,
'gpu': gpu
}
# Train the small and large model.
model_dict = train_models(
_run, model_args, path_tfms, ds_train, is_learnt, is_sklearn, is_meta, grid_search, verbose=verbose
)
# Get training
evaluate_models(_run, model_dict, ds_train, ds_test, is_sklearn, n_classes, save_best_model)
# Note no errors
_run.log_scalar('error', None)
set_completion_state(_run, True) # Mark as completed
except Exception as e:
handle_error(_run, e, print_error=True)
if __name__ == '__main__':
# Configuration
config = configs[str(args.config)]
# Update the configuration with the CL-args.
config['ds_name'] = args.datasets
config['save_best_model'] = [args.save]
# Create FSO (this creates a folder to log information into).
create_fso(ex, save_dir, remove_folder=False)
# Run a gridsearch over all parameter combinations.
basic_gridsearch(ex, config, handle_completed_state=False)
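# Illustrative usage (not part of the original script), with the flags defined
# above; 'test' is the default config key and 'ERing' the default dataset:
#
#     python main.py -c test -ds ERing -j 1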
| 40.134021
| 118
| 0.644875
|
58432b5ceaf3ee11467039285fa3a3304c59227b
| 28,998
|
py
|
Python
|
octavia/tests/unit/controller/worker/tasks/test_amphora_driver_tasks.py
|
BoTranVan/octavia
|
70665664b2130f276291cefac0ed3bc0878d6cd9
|
[
"Apache-2.0"
] | null | null | null |
octavia/tests/unit/controller/worker/tasks/test_amphora_driver_tasks.py
|
BoTranVan/octavia
|
70665664b2130f276291cefac0ed3bc0878d6cd9
|
[
"Apache-2.0"
] | null | null | null |
octavia/tests/unit/controller/worker/tasks/test_amphora_driver_tasks.py
|
BoTranVan/octavia
|
70665664b2130f276291cefac0ed3bc0878d6cd9
|
[
"Apache-2.0"
] | 1
|
2021-12-27T13:18:38.000Z
|
2021-12-27T13:18:38.000Z
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_utils import uuidutils
from taskflow.types import failure
from octavia.amphorae.driver_exceptions import exceptions as driver_except
from octavia.common import constants
from octavia.common import data_models
from octavia.controller.worker.tasks import amphora_driver_tasks
from octavia.db import repositories as repo
import octavia.tests.unit.base as base
AMP_ID = uuidutils.generate_uuid()
COMPUTE_ID = uuidutils.generate_uuid()
LISTENER_ID = uuidutils.generate_uuid()
LB_ID = uuidutils.generate_uuid()
CONN_MAX_RETRIES = 10
CONN_RETRY_INTERVAL = 6
FAKE_CONFIG_FILE = 'fake config file'
_amphora_mock = mock.MagicMock()
_amphora_mock.id = AMP_ID
_amphora_mock.status = constants.AMPHORA_ALLOCATED
_load_balancer_mock = mock.MagicMock()
_load_balancer_mock.id = LB_ID
_listener_mock = mock.MagicMock()
_listener_mock.id = LISTENER_ID
_load_balancer_mock.listeners = [_listener_mock]
_vip_mock = mock.MagicMock()
_load_balancer_mock.vip = _vip_mock
_LB_mock = mock.MagicMock()
_amphorae_mock = [_amphora_mock]
_network_mock = mock.MagicMock()
_port_mock = mock.MagicMock()
_ports_mock = [_port_mock]
_session_mock = mock.MagicMock()
@mock.patch('octavia.db.repositories.AmphoraRepository.update')
@mock.patch('octavia.db.repositories.ListenerRepository.update')
@mock.patch('octavia.db.repositories.ListenerRepository.get',
return_value=_listener_mock)
@mock.patch('octavia.db.api.get_session', return_value=_session_mock)
@mock.patch('octavia.controller.worker.tasks.amphora_driver_tasks.LOG')
@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID)
@mock.patch('stevedore.driver.DriverManager.driver')
class TestAmphoraDriverTasks(base.TestCase):
def setUp(self):
_LB_mock.amphorae = [_amphora_mock]
_LB_mock.id = LB_ID
conf = oslo_fixture.Config(cfg.CONF)
conf.config(group="haproxy_amphora",
active_connection_max_retries=CONN_MAX_RETRIES)
conf.config(group="haproxy_amphora",
active_connection_rety_interval=CONN_RETRY_INTERVAL)
conf.config(group="controller_worker",
loadbalancer_topology=constants.TOPOLOGY_SINGLE)
super(TestAmphoraDriverTasks, self).setUp()
def test_amp_listener_update(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
timeout_dict = {constants.REQ_CONN_TIMEOUT: 1,
constants.REQ_READ_TIMEOUT: 2,
constants.CONN_MAX_RETRIES: 3,
constants.CONN_RETRY_INTERVAL: 4}
amp_list_update_obj = amphora_driver_tasks.AmpListenersUpdate()
amp_list_update_obj.execute([_listener_mock], 0,
[_amphora_mock], timeout_dict)
mock_driver.update_amphora_listeners.assert_called_once_with(
[_listener_mock], 0, [_amphora_mock], timeout_dict)
mock_driver.update_amphora_listeners.side_effect = Exception('boom')
amp_list_update_obj.execute([_listener_mock], 0,
[_amphora_mock], timeout_dict)
mock_amphora_repo_update.assert_called_once_with(
_session_mock, AMP_ID, status=constants.ERROR)
def test_listener_update(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
listener_update_obj = amphora_driver_tasks.ListenersUpdate()
listener_update_obj.execute(_load_balancer_mock, [_listener_mock])
mock_driver.update.assert_called_once_with(_listener_mock, _vip_mock)
# Test the revert
amp = listener_update_obj.revert(_load_balancer_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
# Test the revert with exception
repo.ListenerRepository.update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
amp = listener_update_obj.revert(_load_balancer_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
def test_listeners_update(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
listeners_update_obj = amphora_driver_tasks.ListenersUpdate()
listeners = [data_models.Listener(id='listener1'),
data_models.Listener(id='listener2')]
vip = data_models.Vip(ip_address='10.0.0.1')
lb = data_models.LoadBalancer(id='lb1', listeners=listeners, vip=vip)
listeners_update_obj.execute(lb, listeners)
mock_driver.update.assert_has_calls([mock.call(listeners[0], vip),
mock.call(listeners[1], vip)])
self.assertEqual(2, mock_driver.update.call_count)
self.assertIsNotNone(listeners[0].load_balancer)
self.assertIsNotNone(listeners[1].load_balancer)
# Test the revert
amp = listeners_update_obj.revert(lb)
expected_db_calls = [mock.call(_session_mock,
id=listeners[0].id,
provisioning_status=constants.ERROR),
mock.call(_session_mock,
id=listeners[1].id,
provisioning_status=constants.ERROR)]
repo.ListenerRepository.update.has_calls(expected_db_calls)
self.assertEqual(2, repo.ListenerRepository.update.call_count)
self.assertIsNone(amp)
def test_listener_stop(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
listener_stop_obj = amphora_driver_tasks.ListenerStop()
listener_stop_obj.execute(_load_balancer_mock, _listener_mock)
mock_driver.stop.assert_called_once_with(_listener_mock, _vip_mock)
# Test the revert
amp = listener_stop_obj.revert(_listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
# Test the revert with exception
repo.ListenerRepository.update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
amp = listener_stop_obj.revert(_listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
def test_listener_start(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
listener_start_obj = amphora_driver_tasks.ListenerStart()
listener_start_obj.execute(_load_balancer_mock, _listener_mock)
mock_driver.start.assert_called_once_with(_listener_mock, _vip_mock)
# Test the revert
amp = listener_start_obj.revert(_listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
# Test the revert with exception
repo.ListenerRepository.update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
amp = listener_start_obj.revert(_listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
def test_listener_delete(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
listener_delete_obj = amphora_driver_tasks.ListenerDelete()
listener_delete_obj.execute(_load_balancer_mock, _listener_mock)
mock_driver.delete.assert_called_once_with(_listener_mock, _vip_mock)
# Test the revert
amp = listener_delete_obj.revert(_listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
# Test the revert with exception
repo.ListenerRepository.update.reset_mock()
mock_listener_repo_update.side_effect = Exception('fail')
amp = listener_delete_obj.revert(_listener_mock)
repo.ListenerRepository.update.assert_called_once_with(
_session_mock,
id=LISTENER_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
def test_amphora_get_info(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphora_get_info_obj = amphora_driver_tasks.AmphoraGetInfo()
amphora_get_info_obj.execute(_amphora_mock)
mock_driver.get_info.assert_called_once_with(
_amphora_mock)
def test_amphora_get_diagnostics(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphora_get_diagnostics_obj = (amphora_driver_tasks.
AmphoraGetDiagnostics())
amphora_get_diagnostics_obj.execute(_amphora_mock)
mock_driver.get_diagnostics.assert_called_once_with(
_amphora_mock)
def test_amphora_finalize(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphora_finalize_obj = amphora_driver_tasks.AmphoraFinalize()
amphora_finalize_obj.execute(_amphora_mock)
mock_driver.finalize_amphora.assert_called_once_with(
_amphora_mock)
# Test revert
amp = amphora_finalize_obj.revert(None, _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
self.assertIsNone(amp)
# Test revert with exception
repo.AmphoraRepository.update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
amp = amphora_finalize_obj.revert(None, _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
self.assertIsNone(amp)
def test_amphora_post_network_plug(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphora_post_network_plug_obj = (amphora_driver_tasks.
AmphoraPostNetworkPlug())
amphora_post_network_plug_obj.execute(_amphora_mock, _ports_mock)
(mock_driver.post_network_plug.
assert_called_once_with)(_amphora_mock, _port_mock)
# Test revert
amp = amphora_post_network_plug_obj.revert(None, _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
self.assertIsNone(amp)
# Test revert with exception
repo.AmphoraRepository.update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
amp = amphora_post_network_plug_obj.revert(None, _amphora_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
self.assertIsNone(amp)
def test_amphorae_post_network_plug(self, mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
mock_driver.get_network.return_value = _network_mock
_amphora_mock.id = AMP_ID
_amphora_mock.compute_id = COMPUTE_ID
_LB_mock.amphorae = [_amphora_mock]
amphora_post_network_plug_obj = (amphora_driver_tasks.
AmphoraePostNetworkPlug())
port_mock = mock.Mock()
_deltas_mock = {_amphora_mock.id: [port_mock]}
amphora_post_network_plug_obj.execute(_LB_mock, _deltas_mock)
(mock_driver.post_network_plug.
assert_called_once_with(_amphora_mock, port_mock))
# Test revert
amp = amphora_post_network_plug_obj.revert(None, _LB_mock,
_deltas_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
self.assertIsNone(amp)
# Test revert with exception
repo.AmphoraRepository.update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
amp = amphora_post_network_plug_obj.revert(None, _LB_mock,
_deltas_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
self.assertIsNone(amp)
@mock.patch('octavia.db.repositories.LoadBalancerRepository.update')
def test_amphora_post_vip_plug(self,
mock_loadbalancer_repo_update,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphorae_net_config_mock = mock.Mock()
amphora_post_vip_plug_obj = amphora_driver_tasks.AmphoraPostVIPPlug()
amphora_post_vip_plug_obj.execute(_amphora_mock,
_LB_mock,
amphorae_net_config_mock)
mock_driver.post_vip_plug.assert_called_once_with(
_amphora_mock, _LB_mock, amphorae_net_config_mock)
# Test revert
amp = amphora_post_vip_plug_obj.revert(None, _amphora_mock, _LB_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
repo.LoadBalancerRepository.update.assert_called_once_with(
_session_mock,
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
# Test revert with repo exceptions
repo.AmphoraRepository.update.reset_mock()
repo.LoadBalancerRepository.update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
mock_loadbalancer_repo_update.side_effect = Exception('fail')
amp = amphora_post_vip_plug_obj.revert(None, _amphora_mock, _LB_mock)
repo.AmphoraRepository.update.assert_called_once_with(
_session_mock,
id=AMP_ID,
status=constants.ERROR)
repo.LoadBalancerRepository.update.assert_called_once_with(
_session_mock,
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
@mock.patch('octavia.db.repositories.LoadBalancerRepository.update')
def test_amphorae_post_vip_plug(self,
mock_loadbalancer_repo_update,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphorae_net_config_mock = mock.Mock()
amphora_post_vip_plug_obj = amphora_driver_tasks.AmphoraePostVIPPlug()
amphora_post_vip_plug_obj.execute(_LB_mock,
amphorae_net_config_mock)
mock_driver.post_vip_plug.assert_called_once_with(
_amphora_mock, _LB_mock, amphorae_net_config_mock)
# Test revert
amp = amphora_post_vip_plug_obj.revert(None, _LB_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
_session_mock,
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
# Test revert with exception
repo.LoadBalancerRepository.update.reset_mock()
mock_loadbalancer_repo_update.side_effect = Exception('fail')
amp = amphora_post_vip_plug_obj.revert(None, _LB_mock)
repo.LoadBalancerRepository.update.assert_called_once_with(
_session_mock,
id=LB_ID,
provisioning_status=constants.ERROR)
self.assertIsNone(amp)
def test_amphora_cert_upload(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
pem_file_mock = 'test-perm-file'
amphora_cert_upload_mock = amphora_driver_tasks.AmphoraCertUpload()
amphora_cert_upload_mock.execute(_amphora_mock, pem_file_mock)
mock_driver.upload_cert_amp.assert_called_once_with(
_amphora_mock, pem_file_mock)
def test_amphora_update_vrrp_interface(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
_LB_mock.amphorae = _amphorae_mock
timeout_dict = {constants.CONN_MAX_RETRIES: CONN_MAX_RETRIES,
constants.CONN_RETRY_INTERVAL: CONN_RETRY_INTERVAL}
amphora_update_vrrp_interface_obj = (
amphora_driver_tasks.AmphoraUpdateVRRPInterface())
amphora_update_vrrp_interface_obj.execute(_LB_mock)
mock_driver.get_vrrp_interface.assert_called_once_with(
_amphora_mock, timeout_dict=timeout_dict)
# Test revert
mock_driver.reset_mock()
_LB_mock.amphorae = _amphorae_mock
amphora_update_vrrp_interface_obj.revert("BADRESULT", _LB_mock)
mock_amphora_repo_update.assert_called_with(_session_mock,
_amphora_mock.id,
vrrp_interface=None)
mock_driver.reset_mock()
mock_amphora_repo_update.reset_mock()
failure_obj = failure.Failure.from_exception(Exception("TESTEXCEPT"))
amphora_update_vrrp_interface_obj.revert(failure_obj, _LB_mock)
self.assertFalse(mock_amphora_repo_update.called)
# Test revert with exception
mock_driver.reset_mock()
mock_amphora_repo_update.reset_mock()
mock_amphora_repo_update.side_effect = Exception('fail')
_LB_mock.amphorae = _amphorae_mock
amphora_update_vrrp_interface_obj.revert("BADRESULT", _LB_mock)
mock_amphora_repo_update.assert_called_with(_session_mock,
_amphora_mock.id,
vrrp_interface=None)
def test_amphora_vrrp_update(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphorae_network_config = mock.MagicMock()
amphora_vrrp_update_obj = (
amphora_driver_tasks.AmphoraVRRPUpdate())
amphora_vrrp_update_obj.execute(_LB_mock, amphorae_network_config)
mock_driver.update_vrrp_conf.assert_called_once_with(
_LB_mock, amphorae_network_config)
def test_amphora_vrrp_stop(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphora_vrrp_stop_obj = (
amphora_driver_tasks.AmphoraVRRPStop())
amphora_vrrp_stop_obj.execute(_LB_mock)
mock_driver.stop_vrrp_service.assert_called_once_with(_LB_mock)
def test_amphora_vrrp_start(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amphora_vrrp_start_obj = (
amphora_driver_tasks.AmphoraVRRPStart())
amphora_vrrp_start_obj.execute(_LB_mock)
mock_driver.start_vrrp_service.assert_called_once_with(_LB_mock)
def test_amphora_compute_connectivity_wait(self,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
amp_compute_conn_wait_obj = (
amphora_driver_tasks.AmphoraComputeConnectivityWait())
amp_compute_conn_wait_obj.execute(_amphora_mock)
mock_driver.get_info.assert_called_once_with(_amphora_mock)
mock_driver.get_info.side_effect = driver_except.TimeOutException()
self.assertRaises(driver_except.TimeOutException,
amp_compute_conn_wait_obj.execute, _amphora_mock)
mock_amphora_repo_update.assert_called_once_with(
_session_mock, AMP_ID, status=constants.ERROR)
@mock.patch('octavia.amphorae.backends.agent.agent_jinja_cfg.'
'AgentJinjaTemplater.build_agent_config')
def test_amphora_config_update(self,
mock_build_config,
mock_driver,
mock_generate_uuid,
mock_log,
mock_get_session,
mock_listener_repo_get,
mock_listener_repo_update,
mock_amphora_repo_update):
mock_build_config.return_value = FAKE_CONFIG_FILE
amp_config_update_obj = amphora_driver_tasks.AmphoraConfigUpdate()
mock_driver.update_amphora_agent_config.side_effect = [
None, None, driver_except.AmpDriverNotImplementedError,
driver_except.TimeOutException]
# With Flavor
flavor = {constants.LOADBALANCER_TOPOLOGY:
constants.TOPOLOGY_ACTIVE_STANDBY}
amp_config_update_obj.execute(_amphora_mock, flavor)
mock_build_config.assert_called_once_with(
_amphora_mock.id, constants.TOPOLOGY_ACTIVE_STANDBY)
mock_driver.update_amphora_agent_config.assert_called_once_with(
_amphora_mock, FAKE_CONFIG_FILE)
# With no Flavor
mock_driver.reset_mock()
mock_build_config.reset_mock()
amp_config_update_obj.execute(_amphora_mock, None)
mock_build_config.assert_called_once_with(
_amphora_mock.id, constants.TOPOLOGY_SINGLE)
mock_driver.update_amphora_agent_config.assert_called_once_with(
_amphora_mock, FAKE_CONFIG_FILE)
# With amphora that does not support config update
mock_driver.reset_mock()
mock_build_config.reset_mock()
amp_config_update_obj.execute(_amphora_mock, flavor)
mock_build_config.assert_called_once_with(
_amphora_mock.id, constants.TOPOLOGY_ACTIVE_STANDBY)
mock_driver.update_amphora_agent_config.assert_called_once_with(
_amphora_mock, FAKE_CONFIG_FILE)
# With an unknown exception
mock_driver.reset_mock()
mock_build_config.reset_mock()
self.assertRaises(driver_except.TimeOutException,
amp_config_update_obj.execute,
_amphora_mock, flavor)
| 43.41018
| 78
| 0.595489
|
4661c37353b3e408cd76ba0b1aed4992f3bdb864
| 29
|
py
|
Python
|
proposals/utils/__init__.py
|
mindruion/test
|
d27ef1caf8f76aead934bc83be7729f79a4be503
|
[
"MIT"
] | 2
|
2017-04-22T11:07:13.000Z
|
2018-03-02T12:23:24.000Z
|
proposals/utils/__init__.py
|
mindruion/test
|
d27ef1caf8f76aead934bc83be7729f79a4be503
|
[
"MIT"
] | 124
|
2020-04-30T07:06:58.000Z
|
2022-03-28T12:50:16.000Z
|
proposals/utils/__init__.py
|
mindruion/test
|
d27ef1caf8f76aead934bc83be7729f79a4be503
|
[
"MIT"
] | 1
|
2021-08-04T11:44:21.000Z
|
2021-08-04T11:44:21.000Z
|
from .proposal_utils import *
| 29
| 29
| 0.827586
|
d3acd9479decd4bd493fb34223acf0ed85c47262
| 21,870
|
py
|
Python
|
RPI/yolov5/detect.py
|
Aditya239233/MDP
|
87491e1d67e547c11f4bdd5d784d120473429eae
|
[
"MIT"
] | 4
|
2022-01-14T15:06:43.000Z
|
2022-01-18T14:45:04.000Z
|
RPI/yolov5/detect.py
|
Aditya239233/MDP
|
87491e1d67e547c11f4bdd5d784d120473429eae
|
[
"MIT"
] | null | null | null |
RPI/yolov5/detect.py
|
Aditya239233/MDP
|
87491e1d67e547c11f4bdd5d784d120473429eae
|
[
"MIT"
] | null | null | null |
# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
"""
Run inference on images, videos, directories, streams, etc.
Usage:
$ python path/to/detect.py --source path/to/img.jpg --weights yolov5s.pt --img 640
"""
from algorithm.planner.main import Runner
from PiTransmitter import sendData,getAndroidData
from config import WEIGHTSPATH, IMGCONF, CONF, SOURCE,FILELOCATION,FONT_SIZE
import argparse
import sys
import time
from pathlib import Path
import os
from imutils import paths
import imutils
from PIL import Image
import socket
import cv2
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import time
from datetime import datetime
FILE = Path(__file__).absolute()
sys.path.append(FILE.parents[0].as_posix()) # add yolov5/ to path
from models.experimental import attempt_load
from utils.datasets import LoadStreams, LoadImages
from utils.general import check_img_size, check_requirements, check_imshow, colorstr, is_ascii, non_max_suppression, \
apply_classifier, scale_coords, xyxy2xywh, strip_optimizer, set_logging, increment_path, save_one_box
from utils.plots import Annotator, colors
from utils.torch_utils import select_device, load_classifier, time_sync
def concat_tile(im_list_2d):
return cv2.vconcat([cv2.hconcat(im_list_h) for im_list_h in im_list_2d])
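# Illustrative note (not part of the original script): concat_tile expects a 2D
# list of equally sized images and builds a grid, e.g.
# concat_tile([[im1, im2], [im3, im4]]) returns a single 2x2 tiled image.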
def stichImg(imgpath):
print("[INFO] loading images...")
imagePaths = sorted(list(paths.list_images(imgpath)))
images = []
for imagePath in imagePaths:
image = cv2.imread(imagePath)
images.append(image)
print("[INFO] stitching images...")
width,height,channel = images[0].shape #get sample width and height
#gen blank images to fill up space
blank_image = np.zeros((width,height,channel), np.uint8)
extra = (len(images)%3)+1
for i in range(0,extra):
images.append(blank_image)
#arrange into tiles
tileImg= []
row = []
count = 1
for i in images:
i = cv2.resize(i,(0,0),None,0.6,0.6)
row.append(i)
count = count +1
if count == 3:
tileImg.append(row)
row=[]
count = 1
im_tile = concat_tile(tileImg)
cv2.imwrite(f"{imgpath}/stiched.png", im_tile)
cv2.imshow('results',im_tile)
cv2.waitKey(0)
def stichandshow(img_map,save_path):
for key in img_map:
if len(img_map[key])>1:
actualID = key +1
res = cv2.imwrite(f"{save_path[:-1]}{actualID}.JPG", img_map[key])
#print(img_stats)
stichImg(save_path[:-1])
#os._exit(0)
def displayImage(displayImageList,imgpath):
images = []
for imagePath in displayImageList:
image = cv2.imread(imagePath)
image = cv2.resize(image,(480, 360), interpolation = cv2.INTER_AREA)
images.append(image)
print("[INFO] stitching images...")
#width,height,channel = images[0].shape #get sample width and height
row1 = np.hstack((images[0], images[1],images[2]))
row2 = np.hstack((images[3], images[4],images[5]))
row3 = np.hstack((images[6], images[7],images[8]))
stichedImg = np.vstack((row1, row2,row3))
cv2.imwrite(f"{imgpath}/stiched.png", stichedImg)
im = Image.open(f"{imgpath}/stiched.png")
im.show()
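# Illustrative note (not part of the original script): displayImage assumes
# exactly nine image paths and arranges them into a fixed 3x3 grid before
# writing and showing the stitched result.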
@torch.no_grad()
def run(weights='yolov5s.pt', # model.pt path(s)
source='data/images', # file/dir/URL/glob, 0 for webcam
imgsz=640, # inference size (pixels)
conf_thres=0.25, # confidence threshold
iou_thres=0.45, # NMS IOU threshold
max_det=1000, # maximum detections per image
device='', # cuda device, i.e. 0 or 0,1,2,3 or cpu
view_img=False, # show results
save_txt=False, # save results to *.txt
save_conf=False, # save confidences in --save-txt labels
save_crop=False, # save cropped prediction boxes
nosave=False, # do not save images/videos
classes=None, # filter by class: --class 0, or --class 0 2 3
agnostic_nms=False, # class-agnostic NMS
augment=False, # augmented inference
visualize=False, # visualize features
update=False, # update all models
project='runs/detect', # save results to project/name
name='exp', # save results to project/name
exist_ok=False, # existing project/name ok, do not increment
line_thickness=3, # bounding box thickness (pixels)
hide_labels=False, # hide labels
hide_conf=False, # hide confidences
half=False, # use FP16 half-precision inference
):
#ASSIGN CONFIGS
weights = WEIGHTSPATH # file/dir/URL/glob, 0 for webcam
imgsz = IMGCONF # inference size (pixels)
conf_thres = CONF # confidence threshold
source = SOURCE # file/dir/URL/glob, 0 for webcam
#END OF ASSIGNING CONFIGS
samplingFrames = 9
img_stats_buffer = []
img_stats = {}
img_queue = []
img_queue_path = []
img_map = {}
android_data={}
haveID = False
obstacle_num = 1
lastsentid = -1
displayImageList = [f"{FILELOCATION}/placeholder/img1.jpg",f"{FILELOCATION}/placeholder/img2.jpg",f"{FILELOCATION}/placeholder/img3.jpg",f"{FILELOCATION}/placeholder/img4.jpg",f"{FILELOCATION}/placeholder/img5.jpg",f"{FILELOCATION}/placeholder/img6.jpg",f"{FILELOCATION}/placeholder/img7.jpg",f"{FILELOCATION}/placeholder/img8.jpg",f"{FILELOCATION}/placeholder/img9.jpg"]
displayImage(displayImageList,f"{FILELOCATION}/placeholder")
if(source=='1'):#rpi
print("in rpi")
# while True:
# result = getAndroidData()
# if (result=="start"):
# android_data = getAndroidData()
# runner = Runner(android_data) # android_data is the raw string from android
# instructions, android_coor = runner.run()
# sendData(instructions, "stm")
# sendData(android_coor, "android")
# break
android_data = getAndroidData()
#android_data = "ROBOT,1,2,N;OBSTACLE,1,10,5,S;OBSTACLE,2,18,16,W"
runner = Runner(android_data) # android_data is the raw string from android
instructions, android_coor = runner.run()
print(f"coor:{android_coor} ------ inst: {instructions}")
sendData(instructions, "stm")
print(android_coor)
time.sleep(1)
sendData(android_coor, "android")
save_img = not nosave and not source.endswith('.txt') # save inference images
webcam = source.isnumeric() or source.endswith('.txt') or source.lower().startswith(
('rtsp://', 'rtmp://', 'http://', 'https://'))
usePi = False
if source=='1':
usePi = True
# Directories
save_dir = increment_path(Path(project) / name, exist_ok=exist_ok) # increment run
(save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True) # make dir
# Initialize
set_logging()
device = select_device(device)
half &= device.type != 'cpu' # half precision only supported on CUDA
# Load model
w = weights[0] if isinstance(weights, list) else weights
classify, suffix = False, Path(w).suffix.lower()
pt, onnx, tflite, pb, saved_model = (suffix == x for x in ['.pt', '.onnx', '.tflite', '.pb', '']) # backend
stride, names = 64, [f'class{i}' for i in range(1000)] # assign defaults
if pt:
model = attempt_load(weights, map_location=device) # load FP32 model
stride = int(model.stride.max()) # model stride
names = model.module.names if hasattr(model, 'module') else model.names # get class names
if half:
model.half() # to FP16
if classify: # second-stage classifier
modelc = load_classifier(name='resnet50', n=2) # initialize
modelc.load_state_dict(torch.load('resnet50.pt', map_location=device)['model']).to(device).eval()
elif onnx:
check_requirements(('onnx', 'onnxruntime'))
import onnxruntime
session = onnxruntime.InferenceSession(w, None)
else: # TensorFlow models
check_requirements(('tensorflow>=2.4.1',))
import tensorflow as tf
if pb: # https://www.tensorflow.org/guide/migrate#a_graphpb_or_graphpbtxt
def wrap_frozen_graph(gd, inputs, outputs):
x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=""), []) # wrapped import
return x.prune(tf.nest.map_structure(x.graph.as_graph_element, inputs),
tf.nest.map_structure(x.graph.as_graph_element, outputs))
graph_def = tf.Graph().as_graph_def()
graph_def.ParseFromString(open(w, 'rb').read())
frozen_func = wrap_frozen_graph(gd=graph_def, inputs="x:0", outputs="Identity:0")
elif saved_model:
model = tf.keras.models.load_model(w)
elif tflite:
interpreter = tf.lite.Interpreter(model_path=w) # load TFLite model
interpreter.allocate_tensors() # allocate
input_details = interpreter.get_input_details() # inputs
output_details = interpreter.get_output_details() # outputs
int8 = input_details[0]['dtype'] == np.uint8 # is TFLite quantized uint8 model
imgsz = check_img_size(imgsz, s=stride) # check image size
ascii = is_ascii(names) # names are ascii (use PIL for UTF-8)
# Dataloader
if usePi:
print("Connecting to rpi....")
#_,frame = image_hub.recv_image()
#im = cv2.imread(frame)
#print(frame.shape)
        view_img = True
dataset = LoadStreams(source, img_size=imgsz, stride=stride, auto=pt)
bs = len(dataset) # batch_size
#image_hub.send_reply(b'OK')
print("Done")
elif webcam:
view_img = check_imshow()
print(f"View img is {view_img}")
cudnn.benchmark = True # set True to speed up constant image size inference
print(f"Source is {source}")
dataset = LoadStreams(source, img_size=imgsz, stride=stride, auto=pt)
bs = len(dataset) # batch_size
else:
dataset = LoadImages(source, img_size=imgsz, stride=stride, auto=pt)
bs = 1 # batch_size
vid_path, vid_writer = [None] * bs, [None] * bs
# Run inference
if pt and device.type != 'cpu':
model(torch.zeros(1, 3, *imgsz).to(device).type_as(next(model.parameters()))) # run once
t0 = time.time()
startTime = None
for path, img, im0s, vid_cap in dataset:
#get obstacle_num
print(f"Getting obstacle number...")
while not haveID:
obstacle_num = getAndroidData()
if not startTime:
startTime = datetime.now()
print(f"obstacle number is :{obstacle_num}")
obstacle_num = int(obstacle_num)
if obstacle_num>0 and obstacle_num <7:
haveID = True
if onnx:
img = img.astype('float32')
else:
img = torch.from_numpy(img).to(device)
img = img.half() if half else img.float() # uint8 to fp16/32
img = img / 255.0 # 0 - 255 to 0.0 - 1.0
if len(img.shape) == 3:
img = img[None] # expand for batch dim
# Inference
t1 = time_sync()
if pt:
visualize = increment_path(save_dir / Path(path).stem, mkdir=True) if visualize else False
pred = model(img, augment=augment, visualize=visualize)[0]
elif onnx:
pred = torch.tensor(session.run([session.get_outputs()[0].name], {session.get_inputs()[0].name: img}))
else: # tensorflow model (tflite, pb, saved_model)
imn = img.permute(0, 2, 3, 1).cpu().numpy() # image in numpy
if pb:
pred = frozen_func(x=tf.constant(imn)).numpy()
elif saved_model:
pred = model(imn, training=False).numpy()
elif tflite:
if int8:
scale, zero_point = input_details[0]['quantization']
imn = (imn / scale + zero_point).astype(np.uint8) # de-scale
interpreter.set_tensor(input_details[0]['index'], imn)
interpreter.invoke()
pred = interpreter.get_tensor(output_details[0]['index'])
if int8:
scale, zero_point = output_details[0]['quantization']
pred = (pred.astype(np.float32) - zero_point) * scale # re-scale
pred[..., 0] *= imgsz[1] # x
pred[..., 1] *= imgsz[0] # y
pred[..., 2] *= imgsz[1] # w
pred[..., 3] *= imgsz[0] # h
pred = torch.tensor(pred)
# NMS
pred = non_max_suppression(pred, conf_thres, iou_thres, classes, agnostic_nms, max_det=max_det)
t2 = time_sync()
# Second-stage classifier (optional)
if classify:
print("runed classifier")
pred = apply_classifier(pred, modelc, img, im0s)
# Process predictions
for i, det in enumerate(pred): # detections per image
if webcam: # batch_size >= 1
p, s, im0, frame = path[i], f'{i}: ', im0s[i].copy(), dataset.count
else:
p, s, im0, frame = path, '', im0s.copy(), getattr(dataset, 'frame', 0)
p = Path(p) # to Path
save_path = str(save_dir / p.name) # img.jpg
txt_path = str(save_dir / 'labels' / p.stem) + ('' if dataset.mode == 'image' else f'_{frame}') # img.txt
s += '%gx%g ' % img.shape[2:] # print string
gn = torch.tensor(im0.shape)[[1, 0, 1, 0]] # normalization gain whwh
imc = im0.copy() if save_crop else im0 # for save_crop
annotator = Annotator(im0, line_width=line_thickness, pil=not ascii)
if len(det):
# Rescale boxes from img_size to im0 size
det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
# Print results
for c in det[:, -1].unique():
n = (det[:, -1] == c).sum() # detections per class
s += f"{n} {names[int(c)]}{'s' * (n > 1)}, " # add to string
# Keep only the largest detection (by bounding-box area)
maxArea = 0.0
biggestDetect = 0
for *xyxy, conf, cls in reversed(det):
xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh
origin_x = xywh[0]
origin_y = xywh[1]
dwidth=xywh[2]
dheight=xywh[3]
c = int(cls)
area = dwidth * dheight
if area > maxArea:
    maxArea = area
    biggestDetect = c
# Write results
for *xyxy, conf, cls in reversed(det):
c = int(cls)
# If this is the selected largest detection
if c == biggestDetect:
if save_txt: # Write to file
xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh
line = (cls, *xywh, conf) if save_conf else (cls, *xywh) # label format
with open(txt_path + '.txt', 'a') as f:
f.write(('%g ' * len(line)).rstrip() % line + '\n')
if save_img or save_crop or view_img: # Add bbox to image
c = int(cls) # integer class
label = None if hide_labels else (names[c] if hide_conf else f'{names[c]} {conf:.2f}')
annotator.box_label(xyxy, f"id:{c+1} - {label}", color=colors(c, True))
if save_crop:
save_one_box(xyxy, imc, file=save_dir / 'crops' / names[c] / f'{p.stem}.jpg', BGR=True)
if c + 1 == 31:  # class 31 is the bullseye; skip it
    pass
else:
if conf >0:
print(f"{c}-{label}:confidence:{conf},width:{dwidth},height:{dheight},origin x:{origin_x}, origin y:{origin_y}")
img_stats_buffer.append({"id":c,"name":label,"confidence":conf,"width":dwidth,"height":dheight,"origin_x":origin_x,"origin_y":origin_y})
img_queue.append(c)
img_queue_path.append(im0)
# Print time (inference + NMS)
#if(len(pred[0])>1):
# print(f"{pred[0][5]}Confidence: {pred[0][4]}")
#print(f"The 2D-Array is: {det[0]} ,")
#variables for printing
#print(f'{s}Done. ({t2 - t1:.3f}s)')
# Stream results
im0 = annotator.result()
if view_img:
cv2.imshow(str(p), im0)
cv2.waitKey(1) # 1 millisecond
#print(f"{save_path}")
# detectedImage = False;
# for i, det in enumerate(pred):
# #print(f"i:{i},det{det}")
# for *xyxy, conf, cls in reversed(det):
# xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh
# origin_x = xywh[0]
# origin_y = xywh[1]
# dwidth=xywh[2]
# dheight=xywh[3]
# c = int(cls)
# label = None if hide_labels else (names[c] if hide_conf else f'{names[c]} {conf:.2f}')
# if c+1==31:# if detected bulleye send over as 31 with obstacle ID as 100
# continue
# else:
# if conf >0:
# print(f"{c}-{label}:confidence:{conf},width:{dwidth},height:{dheight},origin x:{origin_x}, origin y:{origin_y}")
# img_stats_buffer.append({"id":c,"name":label,"confidence":conf,"width":dwidth,"height":dheight,"origin_x":origin_x,"origin_y":origin_y})
# img_queue.append(c)
# img_queue_path.append(im0)
# if conf>0:
# detectedImage = True;
elements_count = {}
if len(img_queue) > samplingFrames:  # 9 successful detections before saving
for element in img_queue:
if element in elements_count:
elements_count[element] += 1
else:
elements_count[element] = 1
max_key = max(elements_count, key=elements_count.get)
if max_key not in img_map:  # this class has not been assigned an image yet
    for x in range(len(img_queue)):
        if img_queue[x] == max_key:
            img_map[max_key] = img_queue_path[x]
            img_stats[max_key] = img_stats_buffer[x]
            # The (max_key + 1) ID is sent over to Android further below
            break
while len(img_queue) > samplingFrames:  # pop the oldest entries
img_queue_path.pop(0)
img_queue.pop(0)
img_stats_buffer.pop(0)
# Send the latest detected target ID to Android
length = len(img_stats)
if length>0:
last_id = list(img_stats.keys())[-1]
if lastsentid!=last_id:
lastsentid = last_id
target_ID = last_id + 1
#android_data["status"] = True
#android_data["msg"] = f"TARGET,{obstacle_num},{target_ID}"
#print(f"{android_data['msg']}")
if source =="1":
print(f"{target_ID} -id {type(target_ID)}-----------")
if (target_ID<31):#remove bulleye
sendData(f"TARGET,{obstacle_num},{target_ID}","android")
# Write the image to disk and update the display
print(f"Data sent to android: TARGET,{obstacle_num},{target_ID}")
res = cv2.imwrite(f"{save_path[:-1]}{target_ID}.JPG", img_map[lastsentid])
displayImageList[obstacle_num-1] = f"{save_path[:-1]}{target_ID}.JPG"
displayImage(displayImageList,save_path[:-1])
haveID = False
startTime = None
#stichandshow(img_map,save_path)
endTime = datetime.now()
if startTime:
if (endTime - startTime).total_seconds() > 14:
    total_seconds = (endTime - startTime).total_seconds()
    print(f"{startTime} ::: {endTime} ::: total = {total_seconds}s, one cycle ended")
haveID = False
startTime = None
k = cv2.waitKey(30) & 0xFF
if k==27: # Esc key to stop
# for key in img_map:
# if len(img_map[key])>1:
# actualID = key +1
# res = cv2.imwrite(f"{save_path[:-1]}{actualID}.JPG", img_map[key])
# print(img_stats)
# stichImg(save_path[:-1])
os._exit(0)
elif k == -1:  # -1 means no key was pressed, so just continue
continue
# Save results (image with detections)
# if save_img:
# if dataset.mode == 'image':
# cv2.imwrite(save_path, im0)
# else: # 'video' or 'stream'
# if vid_path[i] != save_path: # new video
# vid_path[i] = save_path
# if isinstance(vid_writer[i], cv2.VideoWriter):
# vid_writer[i].release() # release previous video writer
# if vid_cap: # video
# fps = vid_cap.get(cv2.CAP_PROP_FPS)
# w = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH))
# h = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
# else: # stream
# fps, w, h = 30, im0.shape[1], im0.shape[0]
# save_path += '.mp4'
# vid_writer[i] = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h))
# vid_writer[i].write(im0)
if save_txt or save_img:
s = f"\n{len(list(save_dir.glob('labels/*.txt')))} labels saved to {save_dir / 'labels'}" if save_txt else ''
print(f"Results saved to {colorstr('bold', save_dir)}{s}")
if update:
strip_optimizer(weights) # update model (to fix SourceChangeWarning)
print(f'Done. ({time.time() - t0:.3f}s)')
def parse_opt():
parser = argparse.ArgumentParser()
parser.add_argument('--weights', nargs='+', type=str, default='yolov5s.pt', help='model.pt path(s)')
parser.add_argument('--source', type=str, default='data/images', help='file/dir/URL/glob, 0 for webcam')
parser.add_argument('--imgsz', '--img', '--img-size', nargs='+', type=int, default=[640], help='inference size h,w')
parser.add_argument('--conf-thres', type=float, default=0.25, help='confidence threshold')
parser.add_argument('--iou-thres', type=float, default=0.45, help='NMS IoU threshold')
parser.add_argument('--max-det', type=int, default=1000, help='maximum detections per image')
parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu')
parser.add_argument('--view-img', action='store_true', help='show results')
parser.add_argument('--save-txt', action='store_true', help='save results to *.txt')
parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels')
parser.add_argument('--save-crop', action='store_true', help='save cropped prediction boxes')
parser.add_argument('--nosave', action='store_true', help='do not save images/videos')
parser.add_argument('--classes', nargs='+', type=int, help='filter by class: --class 0, or --class 0 2 3')
parser.add_argument('--agnostic-nms', action='store_true', help='class-agnostic NMS')
parser.add_argument('--augment', action='store_true', help='augmented inference')
parser.add_argument('--visualize', action='store_true', help='visualize features')
parser.add_argument('--update', action='store_true', help='update all models')
parser.add_argument('--project', default='runs/detect', help='save results to project/name')
parser.add_argument('--name', default='exp', help='save results to project/name')
parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment')
parser.add_argument('--line-thickness', default=3, type=int, help='bounding box thickness (pixels)')
parser.add_argument('--hide-labels', default=False, action='store_true', help='hide labels')
parser.add_argument('--hide-conf', default=False, action='store_true', help='hide confidences')
parser.add_argument('--half', action='store_true', help='use FP16 half-precision inference')
opt = parser.parse_args()
opt.imgsz *= 2 if len(opt.imgsz) == 1 else 1 # expand
return opt
def main(opt):
print(colorstr('detect: ') + ', '.join(f'{k}={v}' for k, v in vars(opt).items()))
check_requirements(exclude=('tensorboard', 'thop'))
run(**vars(opt))
if __name__ == "__main__":
opt = parse_opt()
main(opt)
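# Usage sketch for this detection script (the file name "detect.py" and the weight
# file are assumptions; the flags are the ones defined in parse_opt above):
#
#   python detect.py --weights best.pt --source 1 --imgsz 640 --conf-thres 0.4 --view-img
#
# Passing --source 1 takes the Raspberry Pi stream branch handled in run() above.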
avg_line_length: 36.510851 | max_line_length: 372 | alphanum_fraction: 0.66278

hexsha: 12202e667b29defa5458499a2ce60ef5cd7fa2b3 | size: 906 | ext: py | lang: Python
max_stars_repo: var/spack/repos/builtin/packages/py-pyfftw/package.py | whitfin/spack @ aabd2be31a511d0e00c1017f7311a421659319d9 | licenses: ["ECL-2.0", "Apache-2.0", "MIT"] | stars: 3 | 2019-06-27T13:26:50.000Z to 2019-07-01T16:24:54.000Z
max_issues_repo: var/spack/repos/builtin/packages/py-pyfftw/package.py | openbiox/spack @ bb6ec7fb40c14b37e094a860e3625af53f633174 | licenses: ["ECL-2.0", "Apache-2.0", "MIT"] | issues: 75 | 2016-07-27T11:43:00.000Z to 2020-12-08T15:56:53.000Z
max_forks_repo: var/spack/repos/builtin/packages/py-pyfftw/package.py | openbiox/spack @ bb6ec7fb40c14b37e094a860e3625af53f633174 | licenses: ["ECL-2.0", "Apache-2.0", "MIT"] | forks: 8 | 2015-10-16T13:51:49.000Z to 2021-10-18T13:58:03.000Z
content:
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPyfftw(PythonPackage):
"""A pythonic wrapper around FFTW, the FFT library,
presenting a unified interface for all the supported transforms."""
homepage = "http://hgomersall.github.com/pyFFTW"
url = "https://pypi.io/packages/source/p/pyFFTW/pyFFTW-0.10.4.tar.gz"
version('0.11.1', sha256='05ea28dede4c3aaaf5c66f56eb0f71849d0d50f5bc0f53ca0ffa69534af14926')
version('0.10.4', '7fb59450308881bb48d9f178947d950e')
depends_on('fftw')
depends_on('py-setuptools', type='build')
depends_on('py-cython', type='build')
depends_on('py-numpy@1.6:', type=('build', 'run'))
depends_on('py-scipy@0.12.0:', type=('build', 'run'))
avg_line_length: 37.75 | max_line_length: 96 | alphanum_fraction: 0.701987

hexsha: f7a202e1e106b6fa39d8ff5ff10d36cf7f79c861 | size: 6,519 | ext: py | lang: Python
max_stars_repo: tensorflow_probability/python/math/gradient.py | mayou36/probability @ f185c852146894af6dc02223020413bf26ecdd5c | licenses: ["Apache-2.0"] | stars: 1 | 2020-08-28T21:01:19.000Z to 2020-08-28T21:01:19.000Z
max_issues_repo: tensorflow_probability/python/math/gradient.py | mayou36/probability @ f185c852146894af6dc02223020413bf26ecdd5c | licenses: ["Apache-2.0"] | issues: 2 | 2021-08-25T16:14:51.000Z to 2022-02-10T04:47:11.000Z
max_forks_repo: tensorflow_probability/python/math/gradient.py | mayou36/probability @ f185c852146894af6dc02223020413bf26ecdd5c | licenses: ["Apache-2.0"] | forks: null
content:
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions for computing gradients."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
__all__ = [
'value_and_gradient',
]
def _prepare_args(xs):
"""Returns a `list` and a `bool` indicating whether args started list-like."""
is_list_like = isinstance(xs, (tuple, list))
if not is_list_like:
xs = [xs]
xs = [
tf.convert_to_tensor(x, dtype_hint=tf.float32, name='x{}'.format(i))
for i, x in enumerate(xs)
]
return xs, is_list_like
def value_and_gradient(f,
xs,
output_gradients=None,
use_gradient_tape=False,
name=None):
"""Computes `f(*xs)` and its gradients wrt to `*xs`.
Args:
f: Python `callable` to be differentiated. If `f` returns a scalar, this
scalar will be differentiated. If `f` returns a tensor or list of tensors,
by default a scalar will be computed by adding all their values to produce
a single scalar. If desired, the tensors can be elementwise multiplied by
the tensors passed as the `dy` keyword argument to the returned gradient
function.
xs: Python list of parameters of `f` for which to differentiate. (Can also
be single `Tensor`.)
output_gradients: A `Tensor` or list of `Tensor`s the same size as the
result `ys = f(*xs)` and holding the gradients computed for each `y` in
`ys`. This argument is forwarded to the underlying gradient implementation
(i.e., either the `grad_ys` argument of `tf.gradients` or the
`output_gradients` argument of `tf.GradientTape.gradient`).
use_gradient_tape: Python `bool` indicating that `tf.GradientTape` should be
used regardless of `tf.executing_eagerly()` status.
Default value: `False`.
name: Python `str` name prefixed to ops created by this function.
Default value: `None` (i.e., `'value_and_gradient'`).
Returns:
y: `y = f(*xs)`.
dydx: Gradient of `y` wrt each of `xs`.
"""
with tf.name_scope(name or 'value_and_gradient'):
xs, is_xs_list_like = _prepare_args(xs)
if tf.executing_eagerly() or use_gradient_tape:
with tf.GradientTape(watch_accessed_variables=False) as tape:
for x in xs:
tape.watch(x)
y = f(*xs)
dydx = tape.gradient(y, xs, output_gradients=output_gradients)
else:
y = f(*xs)
dydx = tf.gradients(ys=y, xs=xs, grad_ys=output_gradients)
if not is_xs_list_like:
dydx = dydx[0]
return y, dydx
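# Minimal usage sketch for value_and_gradient (illustrative only; the values below
# are assumptions, the API is the function defined above):
#
#   f = lambda x: tf.reduce_sum(x ** 2)
#   x = tf.constant([1., 2., 3.])
#   y, dydx = value_and_gradient(f, x)  # y == 14.0, dydx == [2., 4., 6.]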
def value_and_batch_jacobian(f, xs):
"""Computes the value and batch jacobian of `f(arg)` w.r.t. `arg`.
Args:
f: Python callable, returning a 2D `(batch, n)` shaped `Tensor`.
xs: 2D `(batch, n)`-shaped argument `Tensor`(s). If multiple are provided,
a tuple of jacobians are returned.
Returns:
value: The result of `f(xs)`.
jacobian: A `(batch, n, n)` shaped `Tensor`, `d f(xs) / d xs`, or a tuple
thereof.
"""
xs, is_xs_list_like = _prepare_args(xs)
with tf.GradientTape(persistent=True) as tape:
tape.watch(xs)
result = f(*xs)
try:
jacobian = tuple(tape.batch_jacobian(result, x) for x in xs)
except ValueError: # Fallback to for-loop jacobian.
jacobian = tuple(
tape.batch_jacobian(result, x, experimental_use_pfor=False) for x in xs)
if not is_xs_list_like:
jacobian = jacobian[0]
return result, jacobian
def batch_jacobian(f, xs):
"""Computes the batch jacobian of `f(xs)` w.r.t. `xs`.
Args:
f: Python callable, returning a 2D `(batch, n)` shaped `Tensor`.
xs: 2D `(batch, n)`-shaped argument `Tensor`(s). If multiple are provided,
a tuple of jacobians are returned.
Returns:
jacobian: A `(batch, n, n)` shaped `Tensor`, `d f(xs) / d xs`, or a tuple
thereof.
"""
return value_and_batch_jacobian(f, xs)[1]
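# Shape sketch for batch_jacobian (illustrative; the values are assumptions):
#
#   xs = tf.ones([4, 3])                          # (batch, n)
#   jac = batch_jacobian(lambda x: 2. * x, xs)    # (batch, n, n), 2s on each diagonal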
JAX_MODE = False # Rewritten by script.
if JAX_MODE:
import jax # pylint: disable=g-import-not-at-top
import jax.numpy as np # pylint: disable=g-import-not-at-top
import numpy as onp # pylint: disable=g-import-not-at-top
def value_and_gradient(f, # pylint: disable=function-redefined
xs,
output_gradients=None,
use_gradient_tape=False, # pylint: disable=unused-argument
name=None): # pylint: disable=unused-argument
"""Computes `f(*xs)` and its gradients wrt to `*xs`."""
xs, is_xs_list_like = _prepare_args(xs)
y, f_vjp = jax.vjp(f, *xs)
if output_gradients is None:
output_gradients = tf.nest.map_structure(np.ones_like, y)
dydx = list(f_vjp(output_gradients))
if not is_xs_list_like:
dydx = dydx[0]
return y, dydx
def value_and_batch_jacobian(f, xs): # pylint: disable=function-redefined
"""JAX implementation of value_and_batch_jacobian."""
xs, is_xs_list_like = _prepare_args(xs)
y, f_vjp = jax.vjp(f, *xs)
# Let `[B, E_1, ..., E_k]` be the shape of `y`, where the first dimension
# is a batch dimension. We construct a basis for the cotangent space
# `[E_1, ..., E_k]`.
size = onp.prod(y.shape[1:])
basis = np.reshape(np.eye(size, dtype=y.dtype),
(1, size,) + y.shape[1:]) # `[1, size, E_1, ..., E_k]`
basis = np.broadcast_to(
basis, y.shape[:1] + basis.shape[1:]) # `[B, size, E_1, ..., E_k]`
jacobian = jax.vmap(f_vjp, in_axes=1, out_axes=1)(basis)
jacobian = [x.reshape(y.shape + x.shape[2:]) for x in jacobian]
if not is_xs_list_like:
jacobian = jacobian[0]
return y, jacobian
def batch_jacobian(f, xs): # pylint: disable=function-redefined
"""Computes the batch jacobian of `f(xs)` w.r.t. `xs`."""
return value_and_batch_jacobian(f, xs)[1]
avg_line_length: 37.039773 | max_line_length: 84 | alphanum_fraction: 0.647645

hexsha: 4ef9d827b8a1f1e4aeb385cd5fcf9976c3392ac0 | size: 696 | ext: py | lang: Python
max_stars_repo: tests/gamestonk_terminal/etf/test_stockanalysis_model.py | ProFireDev/GamestonkTerminal @ 3b73898f1ac5dcfaa0ad8e7eb81493f8e48fd3fc | licenses: ["MIT"] | stars: null
max_issues_repo: tests/gamestonk_terminal/etf/test_stockanalysis_model.py | ProFireDev/GamestonkTerminal @ 3b73898f1ac5dcfaa0ad8e7eb81493f8e48fd3fc | licenses: ["MIT"] | issues: 1 | 2022-01-15T01:24:24.000Z to 2022-01-15T01:24:24.000Z
max_forks_repo: tests/gamestonk_terminal/etf/test_stockanalysis_model.py | ProFireDev/GamestonkTerminal @ 3b73898f1ac5dcfaa0ad8e7eb81493f8e48fd3fc | licenses: ["MIT"] | forks: 1 | 2021-11-07T20:59:25.000Z to 2021-11-07T20:59:25.000Z
content:
# IMPORTATION STANDARD
# IMPORTATION THIRDPARTY
import pytest
# IMPORTATION INTERNAL
from gamestonk_terminal.etf import stockanalysis_model
@pytest.fixture(scope="module")
def vcr_config():
return {
"filter_headers": [("User-Agent", None)],
}
@pytest.mark.vcr()
def test_get_all_names_symbols(recorder):
result = stockanalysis_model.get_all_names_symbols()
recorder.capture_list(result)
@pytest.mark.vcr()
@pytest.mark.parametrize(
"symbol",
[
"ARKQ",
"ARKW",
],
)
def test_get_etf_overview(recorder, symbol):
result_df = stockanalysis_model.get_etf_overview(symbol)
assert not result_df.empty
recorder.capture(result_df)
avg_line_length: 18.810811 | max_line_length: 60 | alphanum_fraction: 0.712644

hexsha: 7a5bfd10503f2e8db80402d8f787cb2ef16c9855 | size: 4,037 | ext: py | lang: Python
max_stars_repo: Lib/compiler/static/effects.py | isabella232/cinder-1 @ 428669a9a925287f192ab361226e5a8ca3fb74d9 | licenses: ["CNRI-Python-GPL-Compatible"] | stars: 1,886 | 2021-05-03T23:58:43.000Z to 2022-03-31T19:15:58.000Z
max_issues_repo: Lib/compiler/static/effects.py | isabella232/cinder-1 @ 428669a9a925287f192ab361226e5a8ca3fb74d9 | licenses: ["CNRI-Python-GPL-Compatible"] | issues: 70 | 2021-05-04T23:25:35.000Z to 2022-03-31T18:42:08.000Z
max_forks_repo: Lib/compiler/static/effects.py | isabella232/cinder-1 @ 428669a9a925287f192ab361226e5a8ca3fb74d9 | licenses: ["CNRI-Python-GPL-Compatible"] | forks: 52 | 2021-05-04T21:26:03.000Z to 2022-03-08T18:02:56.000Z
content:
# Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
from __future__ import annotations
import ast
from typing import Dict, Optional, Sequence, TYPE_CHECKING
if TYPE_CHECKING:
from .type_binder import TypeBinder
from .types import Value
class NarrowingEffect:
"""captures type narrowing effects on variables"""
def and_(self, other: NarrowingEffect) -> NarrowingEffect:
if other is NoEffect:
return self
return AndEffect(self, other)
def or_(self, other: NarrowingEffect) -> NarrowingEffect:
if other is NoEffect:
return self
return OrEffect(self, other)
def not_(self) -> NarrowingEffect:
return NegationEffect(self)
def apply(
self,
local_types: Dict[str, Value],
local_name_nodes: Optional[Dict[str, ast.Name]] = None,
) -> None:
"""applies the given effect in the target scope. if `local_name_nodes` is passed, populates
it with the underlying name nodes"""
pass
def undo(self, local_types: Dict[str, Value]) -> None:
"""restores the type to its original value"""
pass
def reverse(
self,
local_types: Dict[str, Value],
local_name_nodes: Optional[Dict[str, ast.Name]] = None,
) -> None:
"""applies the reverse of the scope or reverts it if
there is no reverse"""
self.undo(local_types)
class AndEffect(NarrowingEffect):
def __init__(self, *effects: NarrowingEffect) -> None:
self.effects: Sequence[NarrowingEffect] = effects
def and_(self, other: NarrowingEffect) -> NarrowingEffect:
if other is NoEffect:
return self
elif isinstance(other, AndEffect):
return AndEffect(*self.effects, *other.effects)
return AndEffect(*self.effects, other)
def apply(
self,
local_types: Dict[str, Value],
local_name_nodes: Optional[Dict[str, ast.Name]] = None,
) -> None:
for effect in self.effects:
effect.apply(local_types, local_name_nodes)
def undo(self, local_types: Dict[str, Value]) -> None:
"""restores the type to its original value"""
for effect in self.effects:
effect.undo(local_types)
class OrEffect(NarrowingEffect):
def __init__(self, *effects: NarrowingEffect) -> None:
self.effects: Sequence[NarrowingEffect] = effects
def and_(self, other: NarrowingEffect) -> NarrowingEffect:
if other is NoEffect:
return self
elif isinstance(other, OrEffect):
return OrEffect(*self.effects, *other.effects)
return OrEffect(*self.effects, other)
def reverse(
self,
local_types: Dict[str, Value],
local_name_nodes: Optional[Dict[str, ast.Name]] = None,
) -> None:
for effect in self.effects:
effect.reverse(local_types, local_name_nodes)
def undo(self, local_types: Dict[str, Value]) -> None:
"""restores the type to its original value"""
for effect in self.effects:
effect.undo(local_types)
class NoEffect(NarrowingEffect):
def union(self, other: NarrowingEffect) -> NarrowingEffect:
return other
# Singleton instance for no effects
NO_EFFECT = NoEffect()
class NegationEffect(NarrowingEffect):
def __init__(self, negated: NarrowingEffect) -> None:
self.negated = negated
def not_(self) -> NarrowingEffect:
return self.negated
def apply(
self,
local_types: Dict[str, Value],
local_name_nodes: Optional[Dict[str, ast.Name]] = None,
) -> None:
self.negated.reverse(local_types, local_name_nodes)
def undo(self, local_types: Dict[str, Value]) -> None:
self.negated.undo(local_types)
def reverse(
self,
local_types: Dict[str, Value],
local_name_nodes: Optional[Dict[str, ast.Name]] = None,
) -> None:
self.negated.apply(local_types, local_name_nodes)
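# Composition sketch (illustrative; `effect_x` and `effect_y` stand for hypothetical
# NarrowingEffect instances produced by the type binder):
#
#   combined = effect_x.and_(effect_y)   # an AndEffect applying both narrowings
#   combined.apply(local_types)          # narrow the variables inside the branch
#   combined.undo(local_types)           # restore the original types afterwards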
avg_line_length: 29.467153 | max_line_length: 99 | alphanum_fraction: 0.640327

hexsha: e380f4e7d4696534c488f375775b6cf74c6eaf3d | size: 1,393 | ext: py | lang: Python
max_stars_repo: utils/processing.py | Void-zack/Unet_for_ISBI_2012_data @ ba0ee9d353bca518ecc74620baf0396ba7317252 | licenses: ["MIT"] | stars: 2 | 2020-06-09T11:03:24.000Z to 2021-01-28T10:58:10.000Z
max_issues_repo: utils/processing.py | ZACKLDHGZ/Unet_for_ISBI_2012_data @ 0298490a768283126ec9873116e8c53e5f9dc3e9 | licenses: ["MIT"] | issues: 1 | 2021-02-28T08:20:40.000Z to 2021-03-02T14:27:22.000Z
max_forks_repo: utils/processing.py | ZACKLDHGZ/Unet_for_ISBI_2012_data @ 0298490a768283126ec9873116e8c53e5f9dc3e9 | licenses: ["MIT"] | forks: null
content:
import numpy as np
from PIL import Image
from PIL import ImageOps
import cv2 as cv
import configparser
config = configparser.RawConfigParser()
config.read('config.txt')
edge = int(config.get('model settings','edge'))
# prep_Raw
# 3D in 4D out
# 0~255 to 0~1
# 256*256 out
def prep_raw(imgs):
Img = []
for img in imgs:
Img.append(cv.resize(img,(edge,edge),cv.INTER_LINEAR))
Img = np.array(Img)
## Histogram Equalization:
# for i in range(len(Img)):
# Img[i] = np.array(ImageOps.equalize(Image.fromarray((Img[i]-(255-Img[i].max()+Img[i].min())/2).astype('uint8'))))
Img = Img.astype(np.float)/255.0
return Img[:,:,:,np.newaxis]
# train_mask
# 3D in 4D out
# 0~255 to 0,1
# 256*256 out
def train_mask(img):
img = test_mask(img)
return img[:,:,:,np.newaxis]
# test_mask
# 3D in 3D out
# 0~255 to 0,1
# 256*256 out
def test_mask(imgs):
Img = []
for img in imgs:
Img.append(cv.resize(img,(edge,edge),cv.INTER_LINEAR))
Img = np.array(Img)
for i in range(len(Img)):
Img[i] = Img[i]/255
Img[i][Img[i] >= 0.5] = 1
Img[i][Img[i] < 0.5] = 0
return Img.astype('uint8')
# plot raw image
def prep_raw_plot(imgs):
Img = []
for img in imgs:
Img.append(cv.resize(img,(edge,edge),cv.INTER_LINEAR))
Img = np.array(Img)
Img = Img.astype(np.float)/255.0
return Img[:,:,:,np.newaxis]
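# Usage sketch (illustrative; `raw_imgs` and `raw_masks` are assumed to be
# (N, H, W) uint8 arrays loaded elsewhere, e.g. with cv.imread in grayscale):
#
#   x = prep_raw(raw_imgs)      # (N, edge, edge, 1) floats scaled to [0, 1]
#   y = train_mask(raw_masks)   # (N, edge, edge, 1) binary masks of 0/1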
avg_line_length: 24.875 | max_line_length: 123 | alphanum_fraction: 0.61809

hexsha: f2472cf656b78f98f61bd3cfe99de8b8e31f7caa | size: 13,766 | ext: py | lang: Python
max_stars_repo: python/openapi_client/model/token_detail_data.py | Mastercard/mcapi_oauth_encryption_tutorial @ 0c24f778ad57a867eefd8aad44466a49f3f89826 | licenses: ["MIT"] | stars: 26 | 2019-08-15T10:48:16.000Z to 2022-03-03T21:57:52.000Z
max_issues_repo: python/openapi_client/model/token_detail_data.py | Mastercard/mcapi_oauth_encryption_tutorial @ 0c24f778ad57a867eefd8aad44466a49f3f89826 | licenses: ["MIT"] | issues: 12 | 2019-12-30T08:36:00.000Z to 2022-03-29T22:37:50.000Z
max_forks_repo: python/openapi_client/model/token_detail_data.py | Mastercard/mcapi_oauth_encryption_tutorial @ 0c24f778ad57a867eefd8aad44466a49f3f89826 | licenses: ["MIT"] | forks: 36 | 2019-08-14T14:27:35.000Z to 2022-02-13T18:02:36.000Z
content:
"""
MDES Digital Enablement API
These APIs are designed as RPC style stateless web services where each API endpoint represents an operation to be performed. All request and response payloads are sent in the JSON (JavaScript Object Notation) data-interchange format. Each endpoint in the API specifies the HTTP Method used to access it. All strings in request and response objects are to be UTF-8 encoded. Each API URI includes the major and minor version of API that it conforms to. This will allow multiple concurrent versions of the API to be deployed simultaneously. <br><br> **Authentication** <br><br> Mastercard uses OAuth 1.0a with body hash extension for authenticating the API clients. This requires every request that you send to Mastercard to be signed with an RSA private key. A private-public RSA key pair must be generated consisting of: <br><br> 1. A private key for the OAuth signature for API requests. It is recommended to keep the private key in a password-protected or hardware keystore. <br> 2. A public key is shared with Mastercard during the project setup process through either a certificate signing request (CSR) or the API Key Generator. Mastercard will use the public key to verify the OAuth signature that is provided on every API call.<br> An OAUTH1.0a signer library is available on [GitHub](https://github.com/Mastercard/oauth1-signer-java) <br><br> **Encryption** <br><br> All communications between Issuer web service and the Mastercard gateway is encrypted using TLS. <br><br> **Additional Encryption of Sensitive Data** <br><br> In addition to the OAuth authentication, when using MDES Digital Enablement Service, any PCI sensitive and all account holder Personally Identifiable Information (PII) data must be encrypted. This requirement applies to the API fields containing encryptedData. Sensitive data is encrypted using a symmetric session (one-time-use) key. The symmetric session key is then wrapped with an RSA Public Key supplied by Mastercard during API setup phase (the Customer Encryption Key). <br> Java Client Encryption Library available on [GitHub](https://github.com/Mastercard/client-encryption-java) # noqa: E501
The version of the OpenAPI document: 1.3.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from openapi_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from openapi_client.exceptions import ApiAttributeError
class TokenDetailData(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
('payment_account_reference',): {
'max_length': 29,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'payment_account_reference': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'payment_account_reference': 'paymentAccountReference', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""TokenDetailData - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
payment_account_reference (str): \"The unique account reference assigned to the PAN. Conditionally returned if the Token Requestor has opted to receive PAR and providing PAR is assigned by Mastercard or the Issuer provides PAR in the authorization message response.\" . [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""TokenDetailData - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
payment_account_reference (str): \"The unique account reference assigned to the PAN. Conditionally returned if the Token Requestor has opted to receive PAR and providing PAR is assigned by Mastercard or the Issuer provides PAR in the authorization message response.\" . [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
avg_line_length: 53.150579 | max_line_length: 2,146 | alphanum_fraction: 0.614848

hexsha: 9fde32a2842242c4f8f2f907888ba415c8e7336e | size: 1,743 | ext: py | lang: Python
max_stars_repo: tests/aggregation/test_rover_aggregation.py | artinmajdi/crowd-kit @ 174e15f256a4929ed71699ffc1797ea87e0e8a99 | licenses: ["Apache-2.0"] | stars: null
max_issues_repo: tests/aggregation/test_rover_aggregation.py | artinmajdi/crowd-kit @ 174e15f256a4929ed71699ffc1797ea87e0e8a99 | licenses: ["Apache-2.0"] | issues: null
max_forks_repo: tests/aggregation/test_rover_aggregation.py | artinmajdi/crowd-kit @ 174e15f256a4929ed71699ffc1797ea87e0e8a99 | licenses: ["Apache-2.0"] | forks: 1 | 2021-12-24T02:26:57.000Z to 2021-12-24T02:26:57.000Z
content:
import pandas as pd
import pytest
from pandas.testing import assert_series_equal
from crowdkit.aggregation import ROVER
from .data_rover import simple_text_result_rover # noqa: F401
@pytest.fixture
def data_toy():
return pd.DataFrame(
[
['w1', 't1', 'a b c d'],
['w2', 't1', 'b z d e'],
['w3', 't1', 'b c d e f'],
],
columns=['performer', 'task', 'text']
)
@pytest.fixture
def rover_toy_result():
result = pd.Series(['b c d e'], index=['t1'], name='text')
result.index.name = 'task'
return result
def test_rover_aggregation(rover_toy_result, data_toy):
rover = ROVER(tokenizer=lambda x: x.split(' '), detokenizer=lambda x: ' '.join(x))
assert_series_equal(rover_toy_result, rover.fit_predict(data_toy))
@pytest.fixture
def rover_single_overlap_data():
return pd.DataFrame(
[
['w1', 't1', 'a b c d'],
],
columns=['performer', 'task', 'text']
)
@pytest.fixture
def rover_single_overlap_result():
result = pd.Series(['a b c d'], index=['t1'], name='text')
result.index.name = 'task'
return result
def test_rover_single_overlap(rover_single_overlap_data, rover_single_overlap_result):
rover = ROVER(tokenizer=lambda x: x.split(' '), detokenizer=lambda x: ' '.join(x))
assert_series_equal(rover_single_overlap_result, rover.fit_predict(rover_single_overlap_data))
def test_rover_simple_text(simple_text_df, simple_text_result_rover): # noqa F811
rover = ROVER(tokenizer=lambda x: x.split(' '), detokenizer=lambda x: ' '.join(x))
predicted = rover.fit_predict(simple_text_df.rename(columns={'output': 'text'}))
assert_series_equal(predicted, simple_text_result_rover)
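# Usage sketch mirroring the tests above (the DataFrame columns are the ones the
# fixtures use: performer, task, text):
#
#   rover = ROVER(tokenizer=lambda x: x.split(' '), detokenizer=lambda x: ' '.join(x))
#   aggregated = rover.fit_predict(df)   # pandas Series of aggregated texts, indexed by task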
avg_line_length: 29.542373 | max_line_length: 98 | alphanum_fraction: 0.667814

hexsha: 9055ec34fe9e0cfd9bcf97728eec181a5e4450b9 | size: 2,861 | ext: py | lang: Python
max_stars_repo: pdf/RoadMap.py | Stan-fld/DataProject @ f545c5725ec25a12f64195a5955ad75c93aad636 | licenses: ["MIT"] | stars: 1 | 2022-02-02T12:12:18.000Z to 2022-02-02T12:12:18.000Z
max_issues_repo: pdf/RoadMap.py | Stan-fld/DataProject @ f545c5725ec25a12f64195a5955ad75c93aad636 | licenses: ["MIT"] | issues: null
max_forks_repo: pdf/RoadMap.py | Stan-fld/DataProject @ f545c5725ec25a12f64195a5955ad75c93aad636 | licenses: ["MIT"] | forks: null
content:
import os
import uuid
import networkx as nx
import numpy as np
from matplotlib import pyplot as plt
from reportlab.lib.units import inch
from reportlab.pdfgen import canvas
from generation.DataGeneration import DataGeneration
def remove_img(fn):
os.remove(f'{fn}.jpg')
def matrix_to_img(matrix, summit) -> str:
M = np.array(matrix)
# Generate the figure
G2 = nx.DiGraph(M)
plt.figure()
options = {
'node_color': 'yellow',
'node_size': 100,
'edge_color': 'tab:grey',
'with_labels': True
}
# Set node size by type
node_sizes = [3000 if x.kind == 1 else 1500 for x in summit]
# Set color map
cmap = ['darkorange' if x.kind == 1 else 'dodgerblue' for x in summit]
# Draw the graph and specify our characteristics
lbl = ['Dépot' if x.kind == 1 else f'Adresse \n{summit.index(x)}' for x in summit]
nx.draw(G2, with_labels=True, node_color=cmap,
node_size=node_sizes, font_size=8, font_weight="bold", width=0.75,
edgecolors='gray', labels={i: lbl[i] for i in range(len(lbl))})
fn = str(uuid.uuid4())[:6]
plt.savefig(f'{fn}.jpg', format='jpg')
plt.close()
return fn
class RoadMap:
file_name = "default_name"
def __init__(self, file_name):
self.file_name = file_name
def generate(self, data: DataGeneration):
c = canvas.Canvas(f"{self.file_name}.pdf")
c.drawString(100, 800, "Feuille de route")
c.drawString(100, 780, "graph de général")
fn = matrix_to_img(data.data_matrix, data.data_summit)
c.drawImage(f'{fn}.jpg', 0, 760 - 4 * inch, height=4 * inch, preserveAspectRatio=True, mask='auto')
remove_img(fn)
offset = 740 - 4 * inch
for smt in data.data_summit:
c.drawString(100, offset, str(smt))
if offset - 20 < 20:
c.showPage()
offset = 800
else:
offset -= 20
for vh in data.data_vehicles:
c.showPage()
c.drawString(100, 800, f"Feuille de route pour la voiture {vh.id}")
offset = 780
idx = 0
for i, stop in enumerate(vh.full_itinerary):
smt = data.data_summit[stop]
if i == 0:
c.drawString(100, offset, f"Stop n° {i} : {smt}")
else:
if smt.id == vh.itinerary[idx] or smt.id == data.warehouse[vh.kind]:
c.drawString(100, offset, f"Stop n° {i} : {smt}")
idx += 1
else:
c.drawString(100, offset, f"Stop n° {i} : {smt.str_as_stopover()}")
if offset - 20 < 20:
c.showPage()
offset = 800
else:
offset -= 20
c.save()
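# Usage sketch (how DataGeneration is constructed is assumed; the attributes used
# above are data_matrix, data_summit, data_vehicles and warehouse):
#
#   data = DataGeneration(...)                  # produces the matrix, summits and vehicles
#   RoadMap("feuille_de_route").generate(data)  # writes feuille_de_route.pdf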
avg_line_length: 32.511364 | max_line_length: 107 | alphanum_fraction: 0.54841

hexsha: 545b82fc110d763061286b48f6e49cce94737251 | size: 972 | ext: py | lang: Python
max_stars_repo: tests/unit/cython/test_types.py | allen-munsch/python-driver @ 0f322052ebb38b18aa4a28121b10e59f94b84afc | licenses: ["Apache-2.0"] | stars: null
max_issues_repo: tests/unit/cython/test_types.py | allen-munsch/python-driver @ 0f322052ebb38b18aa4a28121b10e59f94b84afc | licenses: ["Apache-2.0"] | issues: null
max_forks_repo: tests/unit/cython/test_types.py | allen-munsch/python-driver @ 0f322052ebb38b18aa4a28121b10e59f94b84afc | licenses: ["Apache-2.0"] | forks: null
content:
# Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.unit.cython.utils import cyimport, cythontest
types_testhelper = cyimport('tests.unit.cython.types_testhelper')
import unittest
class TypesTest(unittest.TestCase):
@cythontest
def test_datetype(self):
types_testhelper.test_datetype(self.assertEqual)
@cythontest
def test_date_side_by_side(self):
types_testhelper.test_date_side_by_side(self.assertEqual)
avg_line_length: 32.4 | max_line_length: 74 | alphanum_fraction: 0.770576

hexsha: 46343fa05849ad43cd6a587853b854dc88b0d53e | size: 5,861 | ext: py | lang: Python
max_stars_repo: faker/providers/cs_CZ/person.py | kaflesudip/faker @ e27fbf6744e730a34e57a3ad747290c8c36656d4 | licenses: ["MIT"] | stars: 1 | 2018-06-18T02:30:24.000Z to 2018-06-18T02:30:24.000Z
max_issues_repo: faker/providers/cs_CZ/person.py | kaflesudip/faker @ e27fbf6744e730a34e57a3ad747290c8c36656d4 | licenses: ["MIT"] | issues: null
max_forks_repo: faker/providers/cs_CZ/person.py | kaflesudip/faker @ e27fbf6744e730a34e57a3ad747290c8c36656d4 | licenses: ["MIT"] | forks: 1 | 2021-03-27T01:40:50.000Z to 2021-03-27T01:40:50.000Z
content:
# coding=utf-8
from __future__ import unicode_literals
from ..person import Provider as PersonProvider
class Provider(PersonProvider):
formats = (
'{{first_name_male}} {{last_name_male}}',
'{{first_name_male}} {{last_name_male}}',
'{{first_name_male}} {{last_name_male}}',
'{{first_name_male}} {{last_name_male}}',
'{{first_name_male}} {{last_name_male}}',
'{{first_name_female}} {{last_name_female}}',
'{{first_name_female}} {{last_name_female}}',
'{{first_name_female}} {{last_name_female}}',
'{{first_name_female}} {{last_name_female}}',
'{{first_name_female}} {{last_name_female}}',
'{{prefix_male}} {{first_name_male}} {{last_name_male}}',
'{{prefix_female}} {{first_name_female}} {{last_name_female}}',
'{{first_name_male}} {{last_name_male}} {{suffix}}',
'{{first_name_female}} {{last_name_female}} {{suffix}}',
'{{prefix_male}} {{first_name_male}} {{last_name_male}} {{suffix}}',
'{{prefix_female}} {{first_name_female}} {{last_name_female}} {{suffix}}'
)
first_names_male = (
'Adam', 'Alexander', 'Alexandr', 'Aleš', 'Alois', 'Antonín', 'Arnošt', 'Bedřich', 'Bohumil', 'Bohumír',
'Bohuslav', 'Břetislav', 'Dalibor', 'Daniel', 'David', 'Denis', 'Dominik', 'Dušan', 'Eduard', 'Emil',
'Erik', 'Filip', 'František', 'Hynek', 'Igor', 'Ivan', 'Ivo', 'Jakub', 'Jan', 'Jaromír', 'Jaroslav',
'Jindřich', 'Jiří', 'Josef', 'Jozef', 'Ján', 'Kamil', 'Karel', 'Kryštof', 'Ladislav', 'Leoš', 'Libor',
'Lubomír', 'Luboš', 'Ludvík', 'Luděk', 'Lukáš', 'Marcel', 'Marek', 'Marian', 'Martin', 'Matyáš', 'Matěj',
'Michael', 'Michal', 'Milan', 'Miloslav', 'Miloš', 'Miroslav', 'Oldřich', 'Ondřej', 'Otakar', 'Patrik', 'Pavel',
'Peter', 'Petr', 'Přemysl', 'Radek', 'Radim', 'Radomír', 'Radovan', 'René', 'Richard', 'Robert', 'Robin', 'Roman',
'Rostislav', 'Rudolf', 'Samuel', 'Stanislav', 'Tadeáš', 'Tomáš', 'Vasyl', 'Viktor', 'Vilém', 'Vladimír', 'Vladislav',
'Vlastimil', 'Vojtěch', 'Vratislav', 'Václav', 'Vít', 'Vítězslav', 'Zbyněk', 'Zdeněk', 'Šimon', 'Štefan', 'Štěpán'
)
first_names_female = (
'Adéla', 'Alena', 'Alexandra', 'Alice', 'Alžběta', 'Andrea', 'Aneta', 'Anežka', 'Anna', 'Barbora', 'Blanka', 'Blažena',
'Bohumila', 'Božena', 'Dagmar', 'Dana', 'Daniela', 'Danuše', 'Denisa', 'Dominika', 'Drahomíra', 'Eliška', 'Emilie',
'Eva', 'Františka', 'Gabriela', 'Hana', 'Helena', 'Ilona', 'Irena', 'Iva', 'Ivana', 'Iveta', 'Jana', 'Jarmila',
'Jaroslava', 'Jindřiška', 'Jitka', 'Jiřina', 'Julie', 'Kamila', 'Karolína', 'Kateřina', 'Klára', 'Kristina', 'Kristýna',
'Květa', 'Květoslava', 'Ladislava', 'Lenka', 'Libuše', 'Lucie', 'Ludmila', 'Magdalena', 'Magdaléna', 'Marcela',
'Marie', 'Markéta', 'Marta', 'Martina', 'Michaela', 'Milada', 'Milena', 'Miloslava', 'Miluše', 'Miroslava', 'Monika',
'Mária', 'Naděžda', 'Natálie', 'Nela', 'Nikol', 'Nikola', 'Olga', 'Pavla', 'Pavlína', 'Petra', 'Radka', 'Renata',
'Renáta', 'Romana', 'Růžena', 'Sabina', 'Simona', 'Soňa', 'Stanislava', 'Sára', 'Tereza', 'Vendula', 'Veronika',
'Viktorie', 'Vladimíra', 'Vlasta', 'Věra', 'Zdenka', 'Zdeňka', 'Zuzana', 'Štěpánka', 'Šárka', 'Žaneta'
)
last_names_male = (
'Bartoš', 'Beneš', 'Blažek', 'Bláha', 'Doležal', 'Dušek', 'Dvořák', 'Fiala', 'Holub', 'Horák', 'Hájek', 'Jelínek',
'Kadlec', 'Kolář', 'Kopecký', 'Kratochvíl', 'Krejčí', 'Král', 'Kučera', 'Kříž', 'Malý', 'Marek', 'Mareš', 'Mašek',
'Moravec', 'Novotný', 'Novák', 'Němec', 'Pokorný', 'Polák', 'Pospíšil', 'Procházka', 'Růžička', 'Sedláček', 'Soukup',
'Svoboda', 'Urban', 'Vaněk', 'Veselý', 'Vlček', 'Zeman', 'Čermák', 'Černý', 'Říha', 'Šimek', 'Štěpánek', 'Šťastný'
)
last_names_female = (
'Bartošová', 'Benešová', 'Beranová', 'Blažková', 'Bláhová', 'Doležalová', 'Dušková', 'Dvořáková', 'Fialová', 'Holubová',
'Horáková', 'Hájková', 'Jandová', 'Jelínková', 'Kadlecová', 'Kolářová', 'Kopecká', 'Kratochvílová', 'Krejčová',
'Králová', 'Kučerová', 'Křížová', 'Machová', 'Malá', 'Marešová', 'Marková', 'Mašková', 'Moravcová', 'Novotná',
'Nováková', 'Němcová', 'Pokorná', 'Poláková', 'Pospíšilová', 'Procházková', 'Růžičková', 'Sedláčková', 'Soukupová',
'Svobodová', 'Tichá', 'Urbanová', 'Vacková', 'Vaňková', 'Veselá', 'Vlčková', 'Vávrová', 'Zemanová', 'Čermáková',
'Černá', 'Říhová', 'Šimková', 'Štěpánková', 'Šťastná'
)
degrees = ('JUDr.', 'Ing.', 'Bc.', 'Mgr.', 'MUDr.', 'RNDr.')
prefixes_male = ('pan', ) + degrees
prefixes_female = ('paní', 'slečna', ) + degrees
suffixes = ('CSc.', 'DiS.', 'Ph.D.', 'Th.D.')
@classmethod
def first_name(cls):
return cls.random_element((cls.first_name_male(), cls.first_name_female()))
@classmethod
def last_name(cls):
return cls.random_element((cls.last_name_male(), cls.last_name_female()))
@classmethod
def first_name_male(cls):
return cls.random_element(cls.first_names_male)
@classmethod
def first_name_female(cls):
return cls.random_element(cls.first_names_female)
@classmethod
def last_name_male(cls):
return cls.random_element(cls.last_names_male)
@classmethod
def last_name_female(cls):
return cls.random_element(cls.last_names_female)
@classmethod
def prefix(cls):
return cls.random_element((cls.prefix_male(), cls.prefix_female()))
@classmethod
def prefix_male(cls):
return cls.random_element(cls.prefixes_male)
@classmethod
def prefix_female(cls):
return cls.random_element(cls.prefixes_female)
@classmethod
def suffix(cls):
return cls.random_element(cls.suffixes)
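# Usage sketch (standard Faker wiring, not specific to this file):
#
#   from faker import Faker
#   fake = Faker('cs_CZ')
#   fake.name()   # e.g. 'Ing. Jan Novák CSc.', built from the formats above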
avg_line_length: 51.412281 | max_line_length: 129 | alphanum_fraction: 0.598533

hexsha: 66a6744666ce0a3ca1d70f99cf2ca68a098068d6 | size: 6,314 | ext: py | lang: Python
max_stars_repo: tests/test_config.py | NathanNguyen345/user-sync.py @ 273a5c120a5dd97509a7e0ad8ccdcb8067427f99 | licenses: ["MIT"] | stars: null
max_issues_repo: tests/test_config.py | NathanNguyen345/user-sync.py @ 273a5c120a5dd97509a7e0ad8ccdcb8067427f99 | licenses: ["MIT"] | issues: null
max_forks_repo: tests/test_config.py | NathanNguyen345/user-sync.py @ 273a5c120a5dd97509a7e0ad8ccdcb8067427f99 | licenses: ["MIT"] | forks: null
content:
import os
import pytest
import yaml
import shutil
from util import update_dict
from user_sync.config import ConfigFileLoader, ConfigLoader, DictConfig
from user_sync import app
from user_sync.error import AssertionException
def load_ldap_config_options(args):
from user_sync.connector.directory import DirectoryConnector
from user_sync.connector.directory_ldap import LDAPDirectoryConnector
config_loader = ConfigLoader(args)
dc_mod_name = config_loader.get_directory_connector_module_name()
dc_mod = __import__(dc_mod_name, fromlist=[''])
dc = DirectoryConnector(dc_mod)
dc_config_options = config_loader.get_directory_connector_options(dc.name)
caller_config = DictConfig('%s configuration' % dc.name, dc_config_options)
return LDAPDirectoryConnector.get_options(caller_config)
@pytest.fixture
def root_config_file(fixture_dir):
return os.path.join(fixture_dir, 'user-sync-config.yml')
@pytest.fixture
def ldap_config_file(fixture_dir):
return os.path.join(fixture_dir, 'connector-ldap.yml')
@pytest.fixture
def umapi_config_file(fixture_dir):
return os.path.join(fixture_dir, 'connector-umapi.yml')
@pytest.fixture
def tmp_config_files(root_config_file, ldap_config_file, umapi_config_file, tmpdir):
tmpfiles = []
for fname in [root_config_file, ldap_config_file, umapi_config_file]:
basename = os.path.split(fname)[-1]
tmpfile = os.path.join(str(tmpdir), basename)
shutil.copy(fname, tmpfile)
tmpfiles.append(tmpfile)
return tuple(tmpfiles)
@pytest.fixture
def modify_root_config(tmp_config_files):
(root_config_file, _, _) = tmp_config_files
def _modify_root_config(keys, val):
conf = yaml.safe_load(open(root_config_file))
conf = update_dict(conf, keys, val)
yaml.dump(conf, open(root_config_file, 'w'))
return root_config_file
return _modify_root_config
@pytest.fixture
def modify_ldap_config(tmp_config_files):
(_, ldap_config_file, _) = tmp_config_files
def _modify_ldap_config(keys, val):
conf = yaml.safe_load(open(ldap_config_file))
conf = update_dict(conf, keys, val)
yaml.dump(conf, open(ldap_config_file, 'w'))
return ldap_config_file
return _modify_ldap_config
def test_load_root(root_config_file):
"""Load root config file and test for presence of root-level keys"""
config = ConfigFileLoader.load_root_config(root_config_file)
assert isinstance(config, dict)
assert ('adobe_users' in config and 'directory_users' in config and
'logging' in config and 'limits' in config and
'invocation_defaults' in config)
def test_max_adobe_percentage(modify_root_config, cli_args):
root_config_file = modify_root_config(['limits', 'max_adobe_only_users'], "50%")
config = ConfigFileLoader.load_root_config(root_config_file)
assert ('limits' in config and 'max_adobe_only_users' in config['limits'] and
config['limits']['max_adobe_only_users'] == "50%")
args = cli_args({'config_filename': root_config_file})
options = ConfigLoader(args).get_rule_options()
assert 'max_adobe_only_users' in options and options['max_adobe_only_users'] == '50%'
modify_root_config(['limits', 'max_adobe_only_users'], "error%")
with pytest.raises(AssertionException):
ConfigLoader(args).get_rule_options()
def test_additional_groups_config(modify_root_config, cli_args):
addl_groups = [
{"source": r"ACL-(.+)", "target": r"ACL-Grp-(\1)"},
{"source": r"(.+)-ACL", "target": r"ACL-Grp-(\1)"},
]
root_config_file = modify_root_config(['directory_users', 'additional_groups'], addl_groups)
config = ConfigFileLoader.load_root_config(root_config_file)
assert ('additional_groups' in config['directory_users'] and
len(config['directory_users']['additional_groups']) == 2)
args = cli_args({'config_filename': root_config_file})
options = ConfigLoader(args).get_rule_options()
assert addl_groups[0]['source'] in options['additional_groups'][0]['source'].pattern
assert addl_groups[1]['source'] in options['additional_groups'][1]['source'].pattern
def test_twostep_config(tmp_config_files, modify_ldap_config, cli_args):
(root_config_file, ldap_config_file, _) = tmp_config_files
modify_ldap_config(['two_steps_lookup'], {})
args = cli_args({'config_filename': root_config_file})
# test invalid "two_steps_lookup" config
with pytest.raises(AssertionException):
load_ldap_config_options(args)
# test valid "two_steps_lookup" config with "group_member_filter_format" still set
modify_ldap_config(['two_steps_lookup', 'group_member_attribute_name'], 'member')
with pytest.raises(AssertionException):
load_ldap_config_options(args)
# test valid "two_steps_lookup" setup
modify_ldap_config(['two_steps_lookup', 'group_member_attribute_name'], 'member')
modify_ldap_config(['group_member_filter_format'], "")
options = load_ldap_config_options(args)
assert 'two_steps_enabled' in options
assert 'two_steps_lookup' in options
assert 'group_member_attribute_name' in options['two_steps_lookup']
assert options['two_steps_lookup']['group_member_attribute_name'] == 'member'
def test_adobe_users_config(tmp_config_files, modify_root_config, cli_args):
(root_config_file, _, _) = tmp_config_files
args = cli_args({'config_filename': root_config_file})
# test default
config_loader = ConfigLoader(args)
options = config_loader.load_invocation_options()
assert 'adobe_users' in options
assert options['adobe_users'] == ['all']
# test default invocation
modify_root_config(['invocation_defaults', 'adobe_users'], "mapped")
config_loader = ConfigLoader(args)
options = config_loader.load_invocation_options()
assert 'adobe_users' in options
assert options['adobe_users'] == ['mapped']
# test command line param
modify_root_config(['invocation_defaults', 'adobe_users'], "all")
args = cli_args({'config_filename': root_config_file, 'adobe_users': ['mapped']})
config_loader = ConfigLoader(args)
options = config_loader.load_invocation_options()
assert 'adobe_users' in options
assert options['adobe_users'] == ['mapped']
avg_line_length: 38.266667 | max_line_length: 96 | alphanum_fraction: 0.738359

hexsha: fb820d0831a21933167458b3ababb9777ca9f2cf | size: 1,314 | ext: py | lang: Python
max_stars_repo: scripts/heading.py | benjaminogles/vim-head @ be3e01b53d314b6f7e0d72a736fe40f38de2cf5f | licenses: ["MIT"] | stars: 3 | 2020-04-13T17:47:05.000Z to 2020-05-11T17:23:02.000Z
max_issues_repo: scripts/heading.py | benjaminogles/vim-head @ be3e01b53d314b6f7e0d72a736fe40f38de2cf5f | licenses: ["MIT"] | issues: 3 | 2020-04-13T16:51:27.000Z to 2020-04-13T16:53:54.000Z
max_forks_repo: scripts/heading.py | benjaminogles/vim-head @ be3e01b53d314b6f7e0d72a736fe40f38de2cf5f | licenses: ["MIT"] | forks: null
content:
import datetime
def parse_date(date_str):
if not len(date_str):
return None
date_parts = date_str.split(' ')[0].split('-')
return datetime.date(int(date_parts[0]), int(date_parts[1]), int(date_parts[2]))
class Heading:
def __init__(self, line):
self.fields = line.split('|')
self.valid = False
if len(self.fields) == 11:
self.filename = self.fields[0]
self.startlnum = int(self.fields[1])
self.endlnum = self.fields[2]
self.level = int(self.fields[3])
self.keyword = self.fields[4].strip()
self.date = parse_date(self.fields[5].strip())
self.warning = self.fields[6]
self.repeat = self.fields[7]
self.title = self.fields[8]
self.path = self.fields[9]
self.tags = filter(None, map(lambda s: s.strip(), self.fields[10].split(':')))
self.valid = True
def __str__(self):
return '|'.join(self.fields)
def __repr__(self):
return str(self)
def __bool__(self):
return self.valid
KEYWORDS = ['TODO', 'NEXT', 'STARTED', 'WAITING', '|', 'DONE', 'MISSED', 'CANCELLED', 'MEETING']
def from_fields_file(stream):
return filter(None, map(lambda s: Heading(s.strip()), stream.readlines()))
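# --- Hedged usage sketch (editor addition, not part of the original file) ---
# The sample line below is hypothetical; the field order is inferred from
# __init__, which expects 11 pipe-separated fields:
# filename|startlnum|endlnum|level|keyword|date|warning|repeat|title|path|tags
_sample = "notes.org|3|7|1|TODO |2020-04-13 Mon|||Write docs|notes.org/Write docs|:vim:plugin:"
_heading = Heading(_sample)
assert bool(_heading) and _heading.keyword == 'TODO'
assert _heading.date == datetime.date(2020, 4, 13)
assert list(_heading.tags) == ['vim', 'plugin']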
| 31.285714
| 96
| 0.578387
|
a125ef4bcf1dc0c031009764d578d8833f29fbbb
| 61
|
py
|
Python
|
config/static/todo.py
|
cad106uk/market-access-api
|
a357c33bbec93408b193e598a5628634126e9e99
|
[
"MIT"
] | null | null | null |
config/static/todo.py
|
cad106uk/market-access-api
|
a357c33bbec93408b193e598a5628634126e9e99
|
[
"MIT"
] | 51
|
2018-05-31T12:16:31.000Z
|
2022-03-08T09:36:48.000Z
|
config/static/todo.py
|
cad106uk/market-access-api
|
a357c33bbec93408b193e598a5628634126e9e99
|
[
"MIT"
] | 2
|
2019-12-24T09:47:42.000Z
|
2021-02-09T09:36:51.000Z
|
# TODO: What's this folder for?
# Can this be retired?
| 20.333333
| 31
| 0.622951
|
9d9397b0776328f15fc896c82b09ea9ac17fa7bb
| 2,721
|
py
|
Python
|
fairseq/data/token_block_dataset_gap_bert.py
|
liufly/refreader
|
25d371fc08d89174cfdac1c7e29984d8cb3beff2
|
[
"BSD-3-Clause"
] | 19
|
2019-07-18T21:38:38.000Z
|
2020-10-24T09:23:37.000Z
|
fairseq/data/token_block_dataset_gap_bert.py
|
liufly/refreader
|
25d371fc08d89174cfdac1c7e29984d8cb3beff2
|
[
"BSD-3-Clause"
] | 1
|
2019-11-29T02:58:08.000Z
|
2019-12-01T06:11:16.000Z
|
fairseq/data/token_block_dataset_gap_bert.py
|
liufly/refreader
|
25d371fc08d89174cfdac1c7e29984d8cb3beff2
|
[
"BSD-3-Clause"
] | 2
|
2019-12-18T11:37:39.000Z
|
2020-02-04T16:23:20.000Z
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import math
import numpy as np
import torch
from fairseq.data.token_block_dataset import TokenBlockDataset
from fairseq.data.gap_reader import GAP_Record
class TokenBlockGapBertDataset(TokenBlockDataset):
def __init__(self, tokens, sizes, block_size, gap_data, gap_corefs,
gap_bert_weights, break_mode=None, include_targets=False):
super().__init__(tokens, sizes, block_size, break_mode,
include_targets)
self.gap_data = gap_data
self.gap_corefs = gap_corefs
self.gap_bert_weights = gap_bert_weights
def __getitem__(self, index):
s, e = self.slice_indices[index]
token_item = torch.LongTensor(self.tokens[s:e])
assert self.include_targets == True
if s == 0:
source_token = np.concatenate([self.tokens[-1:], self.tokens[0:e - 1]])
else:
source_token = self.tokens[s - 1:e - 1]
seqlen = len(source_token)
assert source_token[0] == 2 # 2 for <eos>
# NOTE: this tuple-based helper is shadowed by the GAP_Record variant of
# _increase_offsets defined below, so it is effectively dead code.
def _increase_offsets(offsets):
return (offsets[0] + 1, offsets[1] + 1)
def _increase_offsets_corefs(corefs):
seqlen = corefs.shape[0]
ret = np.zeros((seqlen, seqlen))
ret[1:, 1:] = corefs[:-1, :-1]
return ret
def _increase_offsets(data):
return GAP_Record(
data.example_id,
data.text,
data.pronoun,
data.pronoun_offset_start + 1,
data.pronoun_offset_end + 1,
data.a,
data.a_offset_start + 1,
data.a_offset_end + 1,
data.a_coref,
data.b,
data.b_offset_start + 1,
data.b_offset_end + 1,
data.b_coref
)
def _increase_offsets_bert(bert_weights):
bert_weights_shape = bert_weights.shape
eos_padding = np.zeros((bert_weights_shape[0], 1, bert_weights_shape[2]))
return np.concatenate([eos_padding, bert_weights], axis=1)
return (
torch.LongTensor(source_token),
_increase_offsets(self.gap_data[index]),
torch.FloatTensor(_increase_offsets_corefs(self.gap_corefs[index])),
torch.FloatTensor(_increase_offsets_bert(self.gap_bert_weights[index])),
token_item,
)
| 34.884615
| 85
| 0.599412
|
5281da28535910aacaa15a03a7c33cf71901d7c5
| 95
|
py
|
Python
|
tests/conftest.py
|
felixonmars/aresponses
|
21799c9c9cf13fa0101519bbcb936d495beeb6ee
|
[
"MIT"
] | 80
|
2017-09-08T15:21:28.000Z
|
2021-01-08T20:41:59.000Z
|
tests/conftest.py
|
felixonmars/aresponses
|
21799c9c9cf13fa0101519bbcb936d495beeb6ee
|
[
"MIT"
] | 42
|
2018-02-23T06:37:26.000Z
|
2021-01-16T18:32:51.000Z
|
tests/conftest.py
|
felixonmars/aresponses
|
21799c9c9cf13fa0101519bbcb936d495beeb6ee
|
[
"MIT"
] | 18
|
2018-02-06T12:10:01.000Z
|
2021-01-16T14:37:20.000Z
|
from aresponses import aresponses
assert aresponses
pytest_plugins = "aiohttp.pytest_plugin"
| 15.833333
| 40
| 0.842105
|
aa19cf53053d3f5803fd6d892a554b782911bc1e
| 4,959
|
py
|
Python
|
sdk/python/pulumi_azure_native/containerservice/latest/list_managed_cluster_access_profile.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/containerservice/latest/list_managed_cluster_access_profile.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/containerservice/latest/list_managed_cluster_access_profile.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'ListManagedClusterAccessProfileResult',
'AwaitableListManagedClusterAccessProfileResult',
'list_managed_cluster_access_profile',
]
warnings.warn("""The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:containerservice:listManagedClusterAccessProfile'.""", DeprecationWarning)
@pulumi.output_type
class ListManagedClusterAccessProfileResult:
"""
Managed cluster Access Profile.
"""
def __init__(__self__, id=None, kube_config=None, location=None, name=None, tags=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kube_config and not isinstance(kube_config, str):
raise TypeError("Expected argument 'kube_config' to be a str")
pulumi.set(__self__, "kube_config", kube_config)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="kubeConfig")
def kube_config(self) -> Optional[str]:
"""
Base64-encoded Kubernetes configuration file.
"""
return pulumi.get(self, "kube_config")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
class AwaitableListManagedClusterAccessProfileResult(ListManagedClusterAccessProfileResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListManagedClusterAccessProfileResult(
id=self.id,
kube_config=self.kube_config,
location=self.location,
name=self.name,
tags=self.tags,
type=self.type)
def list_managed_cluster_access_profile(resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
role_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListManagedClusterAccessProfileResult:
"""
Managed cluster Access Profile.
Latest API Version: 2020-03-01.
:param str resource_group_name: The name of the resource group.
:param str resource_name: The name of the managed cluster resource.
:param str role_name: The name of the role for managed cluster accessProfile resource.
"""
pulumi.log.warn("""list_managed_cluster_access_profile is deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:containerservice:listManagedClusterAccessProfile'.""")
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
__args__['roleName'] = role_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:containerservice/latest:listManagedClusterAccessProfile', __args__, opts=opts, typ=ListManagedClusterAccessProfileResult).value
return AwaitableListManagedClusterAccessProfileResult(
id=__ret__.id,
kube_config=__ret__.kube_config,
location=__ret__.location,
name=__ret__.name,
tags=__ret__.tags,
type=__ret__.type)
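# --- Hedged usage sketch (editor addition, not from the original file) ---
# The names below are placeholders; a real call needs an existing AKS cluster
# and Azure credentials configured for Pulumi.
#
# profile = list_managed_cluster_access_profile(
#     resource_group_name="example-rg",
#     resource_name="example-aks-cluster",
#     role_name="clusterUser")
# pulumi.export("kubeconfig", profile.kube_config)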
| 35.934783
| 234
| 0.650534
|
8ff33ac2add15433ad0b2be211ed2599be6c6773
| 2,834
|
py
|
Python
|
tests/test_tokenization_barthez.py
|
Sara-X/transformers
|
6773fb5dccf88a2d6d250da2cdaaa1fa78a4f5c3
|
[
"Apache-2.0"
] | 172
|
2021-09-14T18:34:17.000Z
|
2022-03-30T06:49:53.000Z
|
tests/test_tokenization_barthez.py
|
Sara-X/transformers
|
6773fb5dccf88a2d6d250da2cdaaa1fa78a4f5c3
|
[
"Apache-2.0"
] | 40
|
2021-09-14T02:26:12.000Z
|
2022-03-29T08:34:04.000Z
|
tests/test_tokenization_barthez.py
|
Sara-X/transformers
|
6773fb5dccf88a2d6d250da2cdaaa1fa78a4f5c3
|
[
"Apache-2.0"
] | 33
|
2021-09-15T07:27:25.000Z
|
2022-03-25T08:30:57.000Z
|
# coding=utf-8
# Copyright 2020 Ecole Polytechnique and HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from transformers import BarthezTokenizer, BarthezTokenizerFast, BatchEncoding
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch
from .test_tokenization_common import TokenizerTesterMixin
@require_tokenizers
@require_sentencepiece
class BarthezTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
tokenizer_class = BarthezTokenizer
rust_tokenizer_class = BarthezTokenizerFast
test_rust_tokenizer = True
def setUp(self):
super().setUp()
tokenizer = BarthezTokenizerFast.from_pretrained("moussaKam/mbarthez")
tokenizer.save_pretrained(self.tmpdirname)
tokenizer.save_pretrained(self.tmpdirname, legacy_format=False)
self.tokenizer = tokenizer
@require_torch
def test_prepare_batch(self):
src_text = ["A long paragraph for summarization.", "Another paragraph for summarization."]
expected_src_tokens = [0, 57, 3018, 70307, 91, 2]
batch = self.tokenizer(
src_text, max_length=len(expected_src_tokens), padding=True, truncation=True, return_tensors="pt"
)
self.assertIsInstance(batch, BatchEncoding)
self.assertEqual((2, 6), batch.input_ids.shape)
self.assertEqual((2, 6), batch.attention_mask.shape)
result = batch.input_ids.tolist()[0]
self.assertListEqual(expected_src_tokens, result)
def test_rust_and_python_full_tokenizers(self):
if not self.test_rust_tokenizer:
return
tokenizer = self.get_tokenizer()
rust_tokenizer = self.get_rust_tokenizer()
sequence = "I was born in 92000, and this is falsé."
tokens = tokenizer.tokenize(sequence)
rust_tokens = rust_tokenizer.tokenize(sequence)
self.assertListEqual(tokens, rust_tokens)
ids = tokenizer.encode(sequence, add_special_tokens=False)
rust_ids = rust_tokenizer.encode(sequence, add_special_tokens=False)
self.assertListEqual(ids, rust_ids)
rust_tokenizer = self.get_rust_tokenizer()
ids = tokenizer.encode(sequence)
rust_ids = rust_tokenizer.encode(sequence)
self.assertListEqual(ids, rust_ids)
| 36.805195
| 109
| 0.732886
|
cad7a8ca63e2bfdb9a35e9768ce18dd97faa6513
| 4,100
|
py
|
Python
|
tests/test_parseutils.py
|
zzl0/litecli
|
cd5d4e0cf9164a147c0c59f6f559347c851db5d6
|
[
"BSD-3-Clause"
] | 1,556
|
2018-08-29T16:01:53.000Z
|
2022-03-30T22:19:01.000Z
|
tests/test_parseutils.py
|
zzl0/litecli
|
cd5d4e0cf9164a147c0c59f6f559347c851db5d6
|
[
"BSD-3-Clause"
] | 121
|
2018-08-24T04:01:35.000Z
|
2022-03-30T09:34:31.000Z
|
tests/test_parseutils.py
|
zzl0/litecli
|
cd5d4e0cf9164a147c0c59f6f559347c851db5d6
|
[
"BSD-3-Clause"
] | 62
|
2018-12-21T12:49:11.000Z
|
2022-03-06T19:26:12.000Z
|
import pytest
from litecli.packages.parseutils import (
extract_tables,
query_starts_with,
queries_start_with,
is_destructive,
)
def test_empty_string():
tables = extract_tables("")
assert tables == []
def test_simple_select_single_table():
tables = extract_tables("select * from abc")
assert tables == [(None, "abc", None)]
def test_simple_select_single_table_schema_qualified():
tables = extract_tables("select * from abc.def")
assert tables == [("abc", "def", None)]
def test_simple_select_multiple_tables():
tables = extract_tables("select * from abc, def")
assert sorted(tables) == [(None, "abc", None), (None, "def", None)]
def test_simple_select_multiple_tables_schema_qualified():
tables = extract_tables("select * from abc.def, ghi.jkl")
assert sorted(tables) == [("abc", "def", None), ("ghi", "jkl", None)]
def test_simple_select_with_cols_single_table():
tables = extract_tables("select a,b from abc")
assert tables == [(None, "abc", None)]
def test_simple_select_with_cols_single_table_schema_qualified():
tables = extract_tables("select a,b from abc.def")
assert tables == [("abc", "def", None)]
def test_simple_select_with_cols_multiple_tables():
tables = extract_tables("select a,b from abc, def")
assert sorted(tables) == [(None, "abc", None), (None, "def", None)]
def test_simple_select_with_cols_multiple_tables_with_schema():
tables = extract_tables("select a,b from abc.def, def.ghi")
assert sorted(tables) == [("abc", "def", None), ("def", "ghi", None)]
def test_select_with_hanging_comma_single_table():
tables = extract_tables("select a, from abc")
assert tables == [(None, "abc", None)]
def test_select_with_hanging_comma_multiple_tables():
tables = extract_tables("select a, from abc, def")
assert sorted(tables) == [(None, "abc", None), (None, "def", None)]
def test_select_with_hanging_period_multiple_tables():
tables = extract_tables("SELECT t1. FROM tabl1 t1, tabl2 t2")
assert sorted(tables) == [(None, "tabl1", "t1"), (None, "tabl2", "t2")]
def test_simple_insert_single_table():
tables = extract_tables('insert into abc (id, name) values (1, "def")')
# sqlparse mistakenly assigns an alias to the table
# assert tables == [(None, 'abc', None)]
assert tables == [(None, "abc", "abc")]
@pytest.mark.xfail
def test_simple_insert_single_table_schema_qualified():
tables = extract_tables('insert into abc.def (id, name) values (1, "def")')
assert tables == [("abc", "def", None)]
def test_simple_update_table():
tables = extract_tables("update abc set id = 1")
assert tables == [(None, "abc", None)]
def test_simple_update_table_with_schema():
tables = extract_tables("update abc.def set id = 1")
assert tables == [("abc", "def", None)]
def test_join_table():
tables = extract_tables("SELECT * FROM abc a JOIN def d ON a.id = d.num")
assert sorted(tables) == [(None, "abc", "a"), (None, "def", "d")]
def test_join_table_schema_qualified():
tables = extract_tables("SELECT * FROM abc.def x JOIN ghi.jkl y ON x.id = y.num")
assert tables == [("abc", "def", "x"), ("ghi", "jkl", "y")]
def test_join_as_table():
tables = extract_tables("SELECT * FROM my_table AS m WHERE m.a > 5")
assert tables == [(None, "my_table", "m")]
def test_query_starts_with():
query = "USE test;"
assert query_starts_with(query, ("use",)) is True
query = "DROP DATABASE test;"
assert query_starts_with(query, ("use",)) is False
def test_query_starts_with_comment():
query = "# comment\nUSE test;"
assert query_starts_with(query, ("use",)) is True
def test_queries_start_with():
sql = "# comment\n" "show databases;" "use foo;"
assert queries_start_with(sql, ("show", "select")) is True
assert queries_start_with(sql, ("use", "drop")) is True
assert queries_start_with(sql, ("delete", "update")) is False
def test_is_destructive():
sql = "use test;\n" "show databases;\n" "drop database foo;"
assert is_destructive(sql) is True
| 31.060606
| 85
| 0.675854
|
7c6ebc9433fb21f283cc4f4fa0a77a899a73121c
| 4,944
|
py
|
Python
|
vitrage/api_handler/apis/template.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 89
|
2015-09-30T21:42:17.000Z
|
2022-03-28T16:31:19.000Z
|
vitrage/api_handler/apis/template.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 4
|
2015-12-13T13:06:53.000Z
|
2016-01-03T19:51:28.000Z
|
vitrage/api_handler/apis/template.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 43
|
2015-11-04T15:54:27.000Z
|
2021-12-10T14:24:03.000Z
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from oslo_log import log
from osprofiler import profiler
from vitrage.common.constants import TemplateStatus as TStatus
from vitrage.evaluator.template_db import template_repository as template_repo
from vitrage.evaluator.template_schema_factory import TemplateSchemaFactory
LOG = log.getLogger(__name__)
@profiler.trace_cls("template apis",
info={}, hide_args=False, trace_private=False)
class TemplateApis(object):
FAILED_MSG = 'validation failed'
OK_MSG = 'validation OK'
def __init__(self, notifier=None, db=None):
self.notifier = notifier
self.db = db
def validate_template(self, ctx, templates, template_type, params=None):
LOG.debug("TemplateApis validate_template type: %s content: %s",
template_type, templates)
files_content = [t[1] for t in templates]
paths = [t[0] for t in templates]
results = template_repo.validate_templates(self.db, files_content,
template_type, params)
results = [_to_result(r, p) for r, p in zip(results, paths)]
return json.dumps({'results': results})
def template_versions(self, ctx):
versions = sorted(TemplateSchemaFactory.supported_versions())
# TODO(eyalb) at the moment all are supported
current = max(versions)
return [
{
'version': 'v%s' % version,
'status': 'SUPPORTED' if version < current else 'CURRENT'
}
for version in versions
]
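# Hedged worked example (editor addition): with supported versions [2, 3]
# the method above would return
# [{'version': 'v2', 'status': 'SUPPORTED'},
#  {'version': 'v3', 'status': 'CURRENT'}]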
def add_template(self, ctx, templates, template_type, params=None):
"""Signal the evaluator
A new template has been added to the database with a status of
LOADING that needs to be handled.
"""
LOG.debug("TemplateApis add_template type: %s content: %s params: %s",
template_type, templates, params)
files_content = [t[1] for t in templates]
db_rows = template_repo.add_templates_to_db(self.db, files_content,
template_type, params)
if self._is_evaluator_reload_required(db_rows):
LOG.info("Adding templates..")
self.notifier.notify("add template", {'template_action': 'add'})
return [_db_template_to_dict(r) for r in db_rows]
def _is_evaluator_reload_required(self, db_rows):
"""Is evaluator reload required
If all the templates have error status, no need to reload evaluators
"""
return any([True for t in db_rows if t.status != TStatus.ERROR])
def delete_template(self, ctx, uuids):
"""Signal the evaluator
A template status has been changed to DELETING.
"""
db = self.db
if not isinstance(uuids, list):
uuids = [uuids]
LOG.info("Deleting templates %s ", uuids)
templates = [t for _id in uuids for t in db.templates.query(uuid=_id)
if t.status != TStatus.DELETED]
if not templates:
return
for t in templates:
if t.status == TStatus.ERROR:
db.templates.update(t.uuid, "status", TStatus.DELETED)
else:
db.templates.update(t.uuid, "status", TStatus.DELETING)
if self._is_evaluator_reload_required(templates):
self.notifier.notify("delete template",
{'template_action': 'delete'})
def _to_result(result, template_path):
if result.is_valid_config:
return {
'file path': template_path,
'status': TemplateApis.OK_MSG,
'description': 'Template validation',
'message': str(result.comment),
'status code': result.status_code
}
else:
return {
'file path': template_path,
'status': TemplateApis.FAILED_MSG,
'description': result.description,
'message': str(result.comment),
'status code': result.status_code
}
def _db_template_to_dict(template):
return {
"uuid": template.uuid,
"name": template.name,
"status": template.status,
"date": str(template.created_at),
"status details": template.status_details,
"type": template.template_type,
}
| 35.826087
| 78
| 0.621359
|
bb37670832e404684ad95ab36f56f0dc7e848bc2
| 6,295
|
py
|
Python
|
masakarimonitors/tests/unit/ha/test_masakari.py
|
iorchard/masakari-monitors
|
bcf6129798a821975ab22cff56c791c81883f5da
|
[
"Apache-2.0"
] | null | null | null |
masakarimonitors/tests/unit/ha/test_masakari.py
|
iorchard/masakari-monitors
|
bcf6129798a821975ab22cff56c791c81883f5da
|
[
"Apache-2.0"
] | null | null | null |
masakarimonitors/tests/unit/ha/test_masakari.py
|
iorchard/masakari-monitors
|
bcf6129798a821975ab22cff56c791c81883f5da
|
[
"Apache-2.0"
] | null | null | null |
# Copyright(c) 2017 Nippon Telegraph and Telephone Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testtools
from unittest import mock
import uuid
import eventlet
from keystoneauth1 import loading as ks_loading
from openstack import connection
from openstack import exceptions
from oslo_utils import timeutils
import masakarimonitors.conf
from masakarimonitors.ha import masakari
from masakarimonitors.objects import event_constants as ec
CONF = masakarimonitors.conf.CONF
PROFILE_TYPE = "ha"
PROFILE_NAME = "masakari"
class FakeResponse(object):
def __init__(self, status_code=200, headers=None):
self.status_code = status_code
self.headers = {
'content-type': 'application/json',
'x-openstack-request-id': uuid.uuid4().hex,
}
class TestSendNotification(testtools.TestCase):
def setUp(self):
super(TestSendNotification, self).setUp()
self.api_retry_max = 3
self.api_retry_interval = 1
self.event = {
'notification': {
'type': ec.EventConstants.TYPE_COMPUTE_HOST,
'hostname': 'compute-node1',
'generated_time': timeutils.utcnow(),
'payload': {
'event': ec.EventConstants.EVENT_STOPPED,
'cluster_status': 'OFFLINE',
'host_status': ec.EventConstants.HOST_STATUS_NORMAL
}
}
}
@mock.patch.object(connection, 'Connection')
@mock.patch.object(ks_loading, 'load_session_from_conf_options')
@mock.patch.object(ks_loading, 'load_auth_from_conf_options')
def test_send_notification(
self, mock_auth, mock_session, mock_connection):
mock_conn = mock.Mock()
mock_conn.instance_ha.return_value = mock.Mock()
mock_conn.instance_ha.create_notification.return_value = mock.Mock()
mock_connection.return_value = mock_conn
notifier = masakari.SendNotification()
notifier.send_notification(
self.api_retry_max, self.api_retry_interval, self.event)
mock_conn.instance_ha.create_notification.assert_called_once_with(
type=self.event['notification']['type'],
hostname=self.event['notification']['hostname'],
generated_time=self.event['notification']['generated_time'],
payload=self.event['notification']['payload'])
mock_auth.assert_called_once_with(CONF, 'api')
mock_session.assert_called_once_with(CONF, 'api',
auth=mock_auth.return_value)
@mock.patch.object(connection, 'Connection')
@mock.patch.object(ks_loading, 'load_session_from_conf_options')
@mock.patch.object(ks_loading, 'load_auth_from_conf_options')
def test_send_notification_409_error(
self, mock_auth, mock_session, mock_connection):
mock_conn = mock.Mock()
mock_conn.instance_ha.return_value = mock.Mock()
mock_conn.instance_ha.create_notification.return_value = mock.Mock()
mock_connection.return_value = mock_conn
# TODO(samP): Remove attribute check and else case if
# openstacksdk is bumped up from '>=0.9.19' to '>=0.10.0'
# in global-requirements.
if hasattr(exceptions.HttpException(), 'status_code'):
response = FakeResponse(status_code=409)
status_ex = exceptions.HttpException(response=response)
else:
status_ex = exceptions.HttpException(http_status=409)
mock_conn.instance_ha.create_notification.side_effect = status_ex
notifier = masakari.SendNotification()
notifier.send_notification(
self.api_retry_max, self.api_retry_interval, self.event)
mock_conn.instance_ha.create_notification.assert_called_once_with(
type=self.event['notification']['type'],
hostname=self.event['notification']['hostname'],
generated_time=self.event['notification']['generated_time'],
payload=self.event['notification']['payload'])
@mock.patch.object(eventlet.greenthread, 'sleep')
@mock.patch.object(connection, 'Connection')
@mock.patch.object(ks_loading, 'load_session_from_conf_options')
@mock.patch.object(ks_loading, 'load_auth_from_conf_options')
def test_send_notification_500_error(
self, mock_auth, mock_session, mock_connection, mock_sleep):
mock_conn = mock.Mock()
mock_conn.instance_ha.return_value = mock.Mock()
mock_conn.instance_ha.create_notification.return_value = mock.Mock()
mock_connection.return_value = mock_conn
# TODO(samP): Remove attribute check and else case if
# openstacksdk is bumped up from '>=0.9.19' to '>=0.10.0'
# in global-requirements.
if hasattr(exceptions.HttpException(), 'status_code'):
response = FakeResponse(status_code=500)
status_ex = exceptions.HttpException(response=response)
else:
status_ex = exceptions.HttpException(http_status=500)
mock_conn.instance_ha.create_notification.side_effect = status_ex
mock_sleep.return_value = None
notifier = masakari.SendNotification()
notifier.send_notification(
self.api_retry_max, self.api_retry_interval, self.event)
mock_conn.instance_ha.create_notification.assert_called_with(
type=self.event['notification']['type'],
hostname=self.event['notification']['hostname'],
generated_time=self.event['notification']['generated_time'],
payload=self.event['notification']['payload'])
self.assertEqual(self.api_retry_max + 1,
mock_conn.instance_ha.create_notification.call_count)
| 40.612903
| 78
| 0.6834
|
b3a4e339913da4deeea90c44551d0d02e13cf61c
| 1,242
|
bzl
|
Python
|
packages/typescript/internal/internal_ts_repositories.bzl
|
kriswuollett/rules_nodejs
|
5798eeeda78c8acc2ebc2f24a41aca33164a972f
|
[
"Apache-2.0"
] | 645
|
2017-08-22T22:18:51.000Z
|
2022-03-31T11:50:53.000Z
|
packages/typescript/internal/internal_ts_repositories.bzl
|
bolitt/rules_nodejs
|
ba9f82103c6122bb316614734489e44552d3d266
|
[
"Apache-2.0"
] | 2,172
|
2017-08-26T23:52:39.000Z
|
2022-03-31T23:51:29.000Z
|
packages/typescript/internal/internal_ts_repositories.bzl
|
bolitt/rules_nodejs
|
ba9f82103c6122bb316614734489e44552d3d266
|
[
"Apache-2.0"
] | 570
|
2017-08-24T19:57:44.000Z
|
2022-03-29T12:09:04.000Z
|
# Copyright 2019 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper function to setup @bazel/typescript dev dependencies.
"""
load("@bazel_gazelle//:deps.bzl", "go_repository")
def ts_setup_dev_workspace():
"""
Setup the toolchain needed for local development, but not needed by users.
"""
go_repository(
name = "com_github_kylelemons_godebug",
commit = "9ff306d4fbead574800b66369df5b6144732d58e", # v1.1.0
importpath = "github.com/kylelemons/godebug",
)
go_repository(
name = "com_github_mattn_go_isatty",
commit = "504425e14f742f1f517c4586048b49b37f829c8e", # v0.0.14
importpath = "github.com/mattn/go-isatty",
)
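# --- Hedged usage sketch (editor addition, not from the original file) ---
# In this repository's own WORKSPACE the macro would be loaded and invoked
# roughly as follows; the label is an assumption based on the file's path.
#
# load("//packages/typescript/internal:internal_ts_repositories.bzl", "ts_setup_dev_workspace")
# ts_setup_dev_workspace()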
| 34.5
| 78
| 0.719002
|
785b67a800a95eec1515b137a0fbd99bf5035cca
| 182
|
py
|
Python
|
HQSmokeTests/UserInputs/generateUserInputs.py
|
dimagi/dimagi-qa
|
60d1dc1192b070f4d47d13ec5d1adb0ce07e1373
|
[
"BSD-3-Clause"
] | null | null | null |
HQSmokeTests/UserInputs/generateUserInputs.py
|
dimagi/dimagi-qa
|
60d1dc1192b070f4d47d13ec5d1adb0ce07e1373
|
[
"BSD-3-Clause"
] | 4
|
2021-03-16T12:23:29.000Z
|
2022-01-13T07:17:46.000Z
|
HQSmokeTests/UserInputs/generateUserInputs.py
|
dimagi/dimagi-qa
|
60d1dc1192b070f4d47d13ec5d1adb0ce07e1373
|
[
"BSD-3-Clause"
] | 2
|
2022-01-11T07:44:04.000Z
|
2022-02-11T07:20:42.000Z
|
import random
import string
chars = string.ascii_lowercase + string.digits
# Generated once at import time, so every call below returns the same
# 6-character lowercase alphanumeric string for the duration of the run.
random_string = ''.join(random.choices(chars, k=6))
def fetch_random_string():
return random_string
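# --- Hedged usage sketch (editor addition, not from the original file) ---
# Typical use in the smoke tests would be building unique test data, e.g.:
# new_username = "smoke_user_" + fetch_random_string()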
| 18.2
| 51
| 0.769231
|
3de6d9726e67cefd8ec8899bd604e5cfd0eb7aec
| 2,232
|
py
|
Python
|
ToyCustomerCalculator.py
|
WMUcaleb/Toy-Store-Customer-Calculation
|
9e3a2c438b984bf8311c7191c5bef642d7e862e7
|
[
"Apache-2.0"
] | null | null | null |
ToyCustomerCalculator.py
|
WMUcaleb/Toy-Store-Customer-Calculation
|
9e3a2c438b984bf8311c7191c5bef642d7e862e7
|
[
"Apache-2.0"
] | null | null | null |
ToyCustomerCalculator.py
|
WMUcaleb/Toy-Store-Customer-Calculation
|
9e3a2c438b984bf8311c7191c5bef642d7e862e7
|
[
"Apache-2.0"
] | null | null | null |
from abc import ABC
from abc import abstractmethod
class Toy(ABC):
def __init__(self, age):
self.age = age
@abstractmethod
def get_value(self):
pass
def get_toy(self):
# NOTE: no __init__ in this hierarchy ever assigns self.toy, so calling
# this would raise AttributeError; subclasses set self.type instead.
return self.toy
def get_price(self):
return 0
class StuffedDoll(Toy):
def __init__(self, age, num_eyes):
self.age = age
self.num_eyes = num_eyes
self.type = "StuffedDoll"
def get_value(self):
if self.age <= 20:
return 20
elif self.num_eyes == 2:
return 40
elif self.num_eyes == 1:
return 15
else:
return 10
class Truck(Toy):
def __init__(self, age, num_wheels):
self.age = age
self.num_wheels = num_wheels
self.type = "Truck"
def get_value(self):
if self.age >= 20:
return 50
elif self.num_wheels == 4:
return 50
elif self.num_wheels == 3:
return 20
elif self.num_wheels == 2:
return 15
elif self.num_wheels == 1:
return 10
elif self.num_wheels == 0:
return 5
else:
return 0
class Ball(Toy):
def __init__(self, age, is_worn):
self.age = age
self.is_worn = is_worn
self.type = "Ball"
def get_value(self):
if self.age <= 40:
return 15
elif self.is_worn == True:
return 10
elif self.is_worn == False:
return 35
else:
return 0
class Collector:
def __init__(self, name):
self.name = name
self.inventory = []
def get_toy(self):
return self.inventory
def add_toy(self, Toy):
self.inventory.append(Toy)
return
def get_inventory_apraisal(self):
return sum([i.get_value() for i in self.inventory])
def main():
janelle = Collector("Janelle")
toy1 = Ball(13, True)
toy2 = StuffedDoll(30, 1)
toy3 = Ball(40, False)
janelle.add_toy(toy1)
janelle.add_toy(toy2)
janelle.add_toy(toy3)
print("Total estimation of toy collection investment is: $"+janelle.get_inventory_apraisal().__str__())
main()
| 22.77551
| 107
| 0.548387
|
424ac13a436272ce050f7ad97726d8f348ce40de
| 1,122
|
py
|
Python
|
RecoPixelVertexing/Configuration/python/RecoPixelVertexing_cff.py
|
Ror5ch/cmssw
|
012c3bbaca13d98c83fb91c7f0c9bf3ee737aee4
|
[
"Apache-2.0"
] | 4
|
2020-06-27T23:27:21.000Z
|
2020-11-19T09:17:01.000Z
|
RecoPixelVertexing/Configuration/python/RecoPixelVertexing_cff.py
|
Ror5ch/cmssw
|
012c3bbaca13d98c83fb91c7f0c9bf3ee737aee4
|
[
"Apache-2.0"
] | 524
|
2018-01-29T15:50:45.000Z
|
2021-08-04T14:03:21.000Z
|
RecoPixelVertexing/Configuration/python/RecoPixelVertexing_cff.py
|
Ror5ch/cmssw
|
012c3bbaca13d98c83fb91c7f0c9bf3ee737aee4
|
[
"Apache-2.0"
] | 7
|
2018-02-19T11:17:13.000Z
|
2020-10-12T21:57:00.000Z
|
import FWCore.ParameterSet.Config as cms
from RecoPixelVertexing.PixelTrackFitting.PixelTracks_cff import *
#
# for STARTUP ONLY use try and use Offline 3D PV from pixelTracks, with adaptive vertex
#
from RecoPixelVertexing.PixelVertexFinding.PixelVertexes_cff import *
#from RecoVertex.PrimaryVertexProducer.OfflinePixel3DPrimaryVertices_cfi import *
recopixelvertexingTask = cms.Task(pixelTracksTask,pixelVertices)
recopixelvertexing = cms.Sequence(recopixelvertexingTask)
from Configuration.ProcessModifiers.gpu_cff import gpu
from RecoPixelVertexing.PixelVertexFinding.pixelVertexCUDA_cfi import pixelVertexCUDA
from RecoPixelVertexing.PixelVertexFinding.pixelVertexSoA_cfi import pixelVertexSoA
from RecoPixelVertexing.PixelVertexFinding.pixelVertexFromSoA_cfi import pixelVertexFromSoA as _pixelVertexFromSoA
_pixelVertexingCUDATask = cms.Task(pixelTracksTask,pixelVertexCUDA,pixelVertexSoA,pixelVertices)
# pixelVertexSoAonCPU = pixelVertexCUDA.clone()
# pixelVertexSoAonCPU.onGPU = False;
gpu.toReplaceWith(pixelVertices,_pixelVertexFromSoA)
gpu.toReplaceWith(recopixelvertexingTask,_pixelVertexingCUDATask)
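# --- Hedged usage sketch (editor addition, not from the original file) ---
# A cmsRun configuration would typically pull this fragment in with:
# process.load("RecoPixelVertexing.Configuration.RecoPixelVertexing_cff")
# and schedule it, e.g. process.vertexing_step = cms.Path(process.recopixelvertexing)
# (the attribute name vertexing_step is a placeholder).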
| 44.88
| 114
| 0.88057
|
9c8ce141880ba08c70969769a99f1f491b6945f5
| 2,856
|
py
|
Python
|
tc/graphene/python_worker/avalon_worker/receive_request.py
|
ikegawa-koshi/avalon
|
09ccad29f953341078e767053646f41c8c800237
|
[
"Apache-2.0"
] | 127
|
2019-10-25T08:43:26.000Z
|
2022-03-20T15:33:32.000Z
|
tc/graphene/python_worker/avalon_worker/receive_request.py
|
ikegawa-koshi/avalon
|
09ccad29f953341078e767053646f41c8c800237
|
[
"Apache-2.0"
] | 275
|
2019-10-24T23:36:21.000Z
|
2022-01-24T20:38:07.000Z
|
tc/graphene/python_worker/avalon_worker/receive_request.py
|
ikegawa-koshi/avalon
|
09ccad29f953341078e767053646f41c8c800237
|
[
"Apache-2.0"
] | 110
|
2019-10-30T07:09:25.000Z
|
2022-01-28T09:40:44.000Z
|
#!/usr/bin/python3
# Copyright 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import zmq
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(sys.stdout))
# -------------------------------------------------------------------------
class ZmqSocket():
"""
ZMQ socket to receive Work Order Requests and send Responses.
"""
# -------------------------------------------------------------------------
def __init__(self, zmq_url, wo_processor):
"""
Constructor for ZmqSocket.
"""
self.wo_processor = wo_processor
self.zmq_url = zmq_url
# -------------------------------------------------------------------------
def start_zmq_listener(self):
"""
This function binds to the port configured for zmq and
then indefinitely processes work order requests received
over the zmq connection. It terminates only when an
exception occurs.
"""
# Binding with ZMQ Port
try:
context = zmq.Context()
socket = context.socket(zmq.REP)
socket.bind(self.zmq_url)
logger.info("Bind to zmq port")
except Exception as ex:
logger.exception("Failed to bind socket" +
"shutting down: " + str(ex))
# Process requests
while True:
try:
# Wait for the next request
logger.info("waiting for next request")
msg = socket.recv_string(flags=0, encoding='utf-8')
logger.info("Received request: {}".format(msg))
result = self.wo_processor.process_work_order(msg)
if result:
logger.info("Sent response: {}".format(result))
socket.send_string(result, flags=0, encoding='utf-8')
else:
msg = "Work order result is empty"
logger.info("Sent response: {}".format(msg))
socket.send_string(msg, flags=0, encoding='utf-8')
except Exception as ex:
logger.error("Error while processing work-order: " + str(ex))
break
# -------------------------------------------------------------------------
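# --- Hedged client-side sketch (editor addition, not from the original file) ---
# A work-order submitter pairs a REQ socket with the REP socket used above.
# The URL and JSON payload are placeholders, not Avalon's actual schema.
#
# import zmq
# context = zmq.Context()
# request_socket = context.socket(zmq.REQ)
# request_socket.connect("tcp://localhost:5555")
# request_socket.send_string('{"work_order_id": "wo-1", "params": {}}', encoding='utf-8')
# reply = request_socket.recv_string(encoding='utf-8')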
| 35.7
| 77
| 0.543768
|
24ce57e6e15ee7b3cfab2d7c5198e0cf66aeda83
| 1,763
|
py
|
Python
|
__init__.py
|
Lavton/outlays
|
0e1a9df3672c26d47a38e56b0fed5f2da4e468d8
|
[
"MIT"
] | 1
|
2017-11-09T20:33:25.000Z
|
2017-11-09T20:33:25.000Z
|
__init__.py
|
Lavton/outlays
|
0e1a9df3672c26d47a38e56b0fed5f2da4e468d8
|
[
"MIT"
] | null | null | null |
__init__.py
|
Lavton/outlays
|
0e1a9df3672c26d47a38e56b0fed5f2da4e468d8
|
[
"MIT"
] | null | null | null |
import logging
import time
import os
logging.basicConfig(format=u'[%(asctime)s] %(filename)s[LINE:%(lineno)d, FUNC:%(funcName)s]# %(levelname)-8s %(message)s',
level=logging.INFO, filename=os.path.join(os.path.dirname(__file__), "main.log"))
import telegram_communicator
from telegram_communicator import MessageType
import json_worker
import text_worker
import timey_wimey
import sql_communicator
import config
# we send statistic to the user at the end
(total_saved_bills, total_saved_sum, total_saved_items) = (0, 0, 0)
for m_type, m_date, message in telegram_communicator.get_updates():
res = (0, 0, 0)
if m_type == MessageType.JSON:
res = json_worker.save(message)
if m_type == MessageType.TEXT:
res = text_worker.save(m_date, message)
total_saved_bills += res[0]
total_saved_sum += res[1]
total_saved_items += res[2]
print(m_type)
if total_saved_bills:
today_sum = sql_communicator.get_total_sum_from_date(
timey_wimey.get_timestamp_from_date(
timey_wimey.get_begin_of_day()
))
week_sum = sql_communicator.get_total_sum_from_date(
timey_wimey.get_timestamp_from_date(
timey_wimey.get_begin_of_week()
))
month_sum = sql_communicator.get_total_sum_from_date(
timey_wimey.get_timestamp_from_date(
timey_wimey.get_begin_of_month()
))
if config.debug_mode:
telegram_communicator.send_message("DEBUG MODE")
time.sleep(0.5)
# English translation of the Russian summary message below:
# "Processed new receipts: {} / totalling {} / ({} items) /
#  since start of day: {} / since start of week: {} / since start of month: {}"
telegram_communicator.send_message("""Обработано новых чеков: {}
на сумму {}
(наименований - {} шт.)
____________
с начала дня: {}
с начала недели: {}
с начала месяца: {}
""".format(total_saved_bills, total_saved_sum//100, total_saved_items, today_sum, week_sum, month_sum))
| 32.648148
| 124
| 0.724334
|
f40f265575c99149c1225c549b99a1df03ea199e
| 4,159
|
py
|
Python
|
coto/clients/federation.py
|
jjalonsoc/coto
|
ef80777e6447ac5b78dee77f3132ef0b7fc51387
|
[
"Apache-2.0"
] | null | null | null |
coto/clients/federation.py
|
jjalonsoc/coto
|
ef80777e6447ac5b78dee77f3132ef0b7fc51387
|
[
"Apache-2.0"
] | null | null | null |
coto/clients/federation.py
|
jjalonsoc/coto
|
ef80777e6447ac5b78dee77f3132ef0b7fc51387
|
[
"Apache-2.0"
] | null | null | null |
from furl import furl
import json
import requests
from . import BaseClient
import os
FEDERATION_SIGNIN_URL = os.environ.get('FEDERATION_SIGNIN_URL', 'https://signin.aws.amazon.com/federation')
FEDERATION_DESTINATION = os.environ.get('FEDERATION_DESTINATION', 'https://console.aws.amazon.com/')
class Client(BaseClient):
REQUIRES_AUTHENTICATION = False
def __init__(self, session):
super().__init__(session)
def signin(self, boto3_session):
"""
Signin using a boto3 session.
This method uses the federation endpoint to obtain a signin token using
the credentials in your boto3 session. The signin token is then used
to sign in to the AWS Management Console.
Although possible, you are not encouraged to call this method directly,
instead follow the following example.
Example:
.. code-block:: python
import boto3
import coto
session = coto.Session(
boto3_session=boto3.Session()
)
Request Syntax:
.. code-block:: python
response = client.signin(
boto3_session=boto3.session.Session,
)
Args:
boto3_session (boto3.session.Session): The boto3 session to use as
provider for AWS credentials.
Returns:
bool: Signin succeeded.
"""
r = self.session()._get(self.get_signin_url(boto3_session))
if r.status_code != 200:
raise Exception("failed session signin")
self.session().authenticated = True
return True
def get_signin_url(self, boto3_session):
"""
Build a signin URL for a boto3 session.
This method uses the federation endpoint to obtain a signin token using
the credentials in your boto3 session. The signin token is then embedded
in a federation URL that signs in to the AWS Management Console.
Although possible, you are not encouraged to call this method directly,
instead follow the following example.
Example:
.. code-block:: python
import boto3
import coto
session = coto.Session(
boto3_session=boto3.Session()
)
Request Syntax:
.. code-block:: python
response = client.get_signin_url(
boto3_session=boto3.session.Session,
)
Args:
boto3_session (boto3.session.Session): The boto3 session to use as
provider for AWS credentials.
Returns:
str: The federation signin URL.
"""
url = furl(FEDERATION_SIGNIN_URL)
url.args['Action'] = "login"
url.args['Issuer'] = None
url.args['Destination'] = FEDERATION_DESTINATION
url.args['SigninToken'] = self.get_signin_token(boto3_session)
return url.url
def get_signin_token(self, boto3_session):
"""
Obtain a signin token for a boto3 session.
This method uses the federation endpoint to obtain a signin token using
the credentials in your boto3 session.
Request Syntax:
.. code-block:: python
response = client.get_signin_token(
boto3_session=boto3.session.Session,
)
Args:
boto3_session (boto3.session.Session): The boto3 session to use as
provider for AWS credentials.
Returns:
str: Signin token.
"""
credentials = boto3_session.get_credentials()
url = FEDERATION_SIGNIN_URL
response = self.session()._get(
url,
params={
"Action":
"getSigninToken",
"Session":
json.dumps({
"sessionId": credentials.access_key,
"sessionKey": credentials.secret_key,
"sessionToken": credentials.token,
})
}
)
return json.loads(response.text)["SigninToken"]
| 29.288732
| 107
| 0.573455
|
d7da60055b9c68fb02542c4ba157f58b13a86799
| 859
|
py
|
Python
|
2021/20_2/solution_test.py
|
budavariam/advent_of_code
|
0903bcbb0df46371b6a340ca2be007dce6470c66
|
[
"MIT"
] | null | null | null |
2021/20_2/solution_test.py
|
budavariam/advent_of_code
|
0903bcbb0df46371b6a340ca2be007dce6470c66
|
[
"MIT"
] | null | null | null |
2021/20_2/solution_test.py
|
budavariam/advent_of_code
|
0903bcbb0df46371b6a340ca2be007dce6470c66
|
[
"MIT"
] | 1
|
2022-02-11T13:14:50.000Z
|
2022-02-11T13:14:50.000Z
|
""" Advent of code 2021 day 20 / 2 """
import unittest
from solution import solution
class MyTest(unittest.TestCase):
"""Unist tests for actual day"""
def test_basic(self):
""" Test from the task """
self.assertEqual(solution("""..#.#..#####.#.#.#.###.##.....###.##.#..###.####..#####..#....#..#..##..###..######.###...####..#..#####..##..#.#####...##.#.#..#.##..#.#......#.###.######.###.####...#.##.##..#..#..#####.....#.#....###..#.##......#.....#..#..#..##..#...##.######.####.####.#.#...#.......#..#.#.#...####.##.#......#..#...##.#.##..#...##.#.##..###.#......#.#.......#.#.#.####.###.##...#.....####.#..#..#.##.#....##..#.####....##...##..#...#......#.#.......#.......##..####..#...#.#.#...##..#.#..###..#####........#..####......#..#
#..#.
#....
##..#
..#..
..###"""), 35)
if __name__ == '__main__':
unittest.main()
| 37.347826
| 549
| 0.230501
|
082cf55b25bdea24bad654fa35e868e41deec80e
| 4,357
|
py
|
Python
|
dnsdb/config.py
|
baiyongjie/open_dnsdb
|
b5b7a69e439080cd6d85b692825ed56cd8f5c80a
|
[
"Apache-2.0"
] | 378
|
2019-01-22T02:16:28.000Z
|
2022-03-31T01:34:27.000Z
|
dnsdb/config.py
|
baiyongjie/open_dnsdb
|
b5b7a69e439080cd6d85b692825ed56cd8f5c80a
|
[
"Apache-2.0"
] | 51
|
2019-01-23T03:15:16.000Z
|
2021-05-08T02:22:23.000Z
|
dnsdb/config.py
|
baiyongjie/open_dnsdb
|
b5b7a69e439080cd6d85b692825ed56cd8f5c80a
|
[
"Apache-2.0"
] | 139
|
2019-01-22T02:43:39.000Z
|
2022-02-21T09:16:01.000Z
|
# -*- coding: utf-8 -*-
import os
import sys
from datetime import timedelta
from oslo_config import cfg
CONF = cfg.CONF
CONF.register_opts([
cfg.StrOpt('log-dir'),
cfg.StrOpt('log-file'),
cfg.StrOpt('debug'),
cfg.StrOpt('verbose'),
], 'log')
CONF.register_opts([
cfg.StrOpt('connection'),
cfg.StrOpt('data'),
], 'DB')
CONF.register_opts([
cfg.StrOpt('server'),
cfg.StrOpt('port'),
cfg.StrOpt('from_addr'),
cfg.StrOpt('password', default=''),
cfg.StrOpt('info_list'),
cfg.StrOpt('alert_list'),
], 'MAIL')
CONF.register_opts([
cfg.StrOpt('allow_ip'),
cfg.StrOpt('secret_key'),
cfg.StrOpt('env'),
cfg.StrOpt('header_template', default='../etc/template/zone_header')
], 'etc')
CONF.register_opts([
cfg.IntOpt('dnsupdater_port'),
], 'api')
CONF.register_opts([
cfg.StrOpt('acl_groups'),
cfg.IntOpt('cname_ttl'),
cfg.StrOpt('view_zone'),
cfg.DictOpt('normal_view'),
cfg.DictOpt('normal_cname'),
], 'view')
CONF.register_opts([
cfg.StrOpt('base-url',
default='/',
help='The url prefix of this site.'),
cfg.StrOpt('run-mode',
default="werkzeug",
choices=('gunicorn', 'werkzeug'),
help="Run server use the specify mode."),
cfg.StrOpt('bind',
default='0.0.0.0',
help='The IP address to bind'),
cfg.IntOpt('port',
default=8080,
help='The port to listen'),
cfg.BoolOpt('debug',
default=False),
], 'web')
CONF.register_opts([
cfg.StrOpt('config',
default=None,
help='The path to a Gunicorn config file.'),
cfg.StrOpt('bind',
default='127.0.0.1:8888'),
cfg.IntOpt('workers',
default=0,
help='The number of worker processes for handling requests'),
cfg.BoolOpt('daemon',
default=False,
help='Daemonize the Gunicorn process'),
cfg.StrOpt('accesslog',
default=None,
help='The Access log file to write to.'
'"-" means log to stderr.'),
cfg.StrOpt('loglevel',
default='info',
help='The granularity of Error log outputs.',
choices=('debug', 'info', 'warning', 'error', 'critical')),
cfg.BoolOpt('ignore-healthcheck-accesslog',
default=False),
cfg.IntOpt('timeout',
default=30,
help='Workers silent for more than this many seconds are '
'killed and restarted.'),
cfg.StrOpt('worker-class',
default='sync',
help='The type of workers to use.',
choices=('sync', 'eventlet', 'gevent', 'tornado'))
], 'gunicorn')
def setup_config(app_env, app_kind, conf_dir):
if "--" in sys.argv:
args = sys.argv[sys.argv.index("--") + 1:]
else:
args = []
common_config_file = os.path.join(conf_dir, "etc/{}/common.conf".format(app_env))
default_config_files = [common_config_file]
app_config_file = os.path.join(conf_dir, "etc/{}/{}.conf".format(app_env, app_kind))
default_config_files.append(app_config_file)
CONF(default_config_files=default_config_files, args=args)
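# --- Hedged usage sketch (editor addition, not from the original file) ---
# The environment, app kind and directory are placeholders; they resolve to
# <conf_dir>/etc/<env>/common.conf and <conf_dir>/etc/<env>/<kind>.conf.
#
# setup_config('dev', 'web', '/opt/open_dnsdb')
# print(CONF.web.bind, CONF.web.port)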
class Config(object):
def __init__(self, app_env, app_kind, conf_dir):
# print 'conf_dir: ', conf_dir
if "--" in sys.argv:
args = sys.argv[sys.argv.index("--") + 1:]
else:
args = []
common_config_file = os.path.join(conf_dir, "etc/{}/common.conf".format(app_env))
default_config_files = [common_config_file]
app_config_file = os.path.join(conf_dir, "etc/{}/{}.conf".format(app_env, app_kind))
default_config_files.append(app_config_file)
CONF(default_config_files=default_config_files, args=args)
self.SECRET_KEY = os.environ.get('SECRET_KEY') or CONF.etc.secret_key
self.SQLALCHEMY_DATABASE_URI = CONF.DB.connection
self.SQLALCHEMY_TRACK_MODIFICATIONS = False
self.PERMANENT_SESSION_LIFETIME = timedelta(days=1)
# SECRET_KEY = os.environ.get('SECRET_KEY') or CONF.etc.secret_key
# SQLALCHEMY_DATABASE_URI = CONF.DB.connection
# SQLALCHEMY_TRACK_MODIFICATIONS = False
# PERMANENT_SESSION_LIFETIME = timedelta(days=1)
| 31.572464
| 92
| 0.596511
|
abca2fb04658b1500cf193c95b08a20b4c10effc
| 3,136
|
py
|
Python
|
canvas/canvas.py
|
TriumGroup/3d-cubes
|
6e91dbac9b9fcaca53acdb58d033210b21532b27
|
[
"MIT"
] | null | null | null |
canvas/canvas.py
|
TriumGroup/3d-cubes
|
6e91dbac9b9fcaca53acdb58d033210b21532b27
|
[
"MIT"
] | null | null | null |
canvas/canvas.py
|
TriumGroup/3d-cubes
|
6e91dbac9b9fcaca53acdb58d033210b21532b27
|
[
"MIT"
] | null | null | null |
from math import trunc
from canvas.canvas_point import CanvasPoint
from canvas.line_drawer import LineDrawer
class Canvas:
BLANK_POINT = CanvasPoint(float('-inf'), (255, 255, 255, 255))
def __init__(self, renderer):
self._width, self._height = renderer.size
self.texture = []
self.clear()
def draw_line(self, point_a, point_b):
LineDrawer(self, *point_a, *point_b).draw()
def clear(self):
self.texture = [
[self.BLANK_POINT] * self._height for _ in range(self._width)
]
def draw_point(self, x, y, z, is_dash=False, color=(0, 0, 0, 255)):
point_in_canvas = 0 <= x < self._width and 0 <= y < self._height
if point_in_canvas and (self.texture[x][y].z_index < z or is_dash):
self.texture[x][y] = CanvasPoint(z, color)
def draw_rect(self, point_a, point_b, point_c, point_d, color):
self._draw_triangle(point_a, point_b, point_c, color)
self._draw_triangle(point_c, point_d, point_a, color)
def _draw_triangle(self, point_a, point_b, point_c, color):
if point_a[1] > point_b[1]:
point_a, point_b = point_b, point_a
if point_b[1] > point_c[1]:
point_b, point_c = point_c, point_b
if point_a[1] > point_b[1]:
point_a, point_b = point_b, point_a
d_a_b = (point_b[0] - point_a[0]) / (point_b[1] - point_a[1]) if point_b[1] - point_a[1] > 0 else 0
d_a_c = (point_c[0] - point_a[0]) / (point_c[1] - point_a[1]) if point_c[1] - point_a[1] > 0 else 0
if d_a_b > d_a_c:
for y in range(trunc(point_a[1]), trunc(point_c[1])):
if y < point_b[1]:
self._process_scan_line(y, point_a, point_c, point_a, point_b, color)
else:
self._process_scan_line(y, point_a, point_c, point_b, point_c, color)
else:
for y in range(trunc(point_a[1]), trunc(point_c[1])):
if y < point_b[1]:
self._process_scan_line(y, point_a, point_b, point_a, point_c, color)
else:
self._process_scan_line(y, point_b, point_c, point_a, point_c, color)
def _process_scan_line(self, y, point_a, point_b, point_c, point_d, color):
x_a, y_a, z_a = point_a
x_b, y_b, z_b = point_b
x_c, y_c, z_c = point_c
x_d, y_d, z_d = point_d
gradient1 = (y - y_a) / (y_b - y_a) if y_a != y_b else 1
gradient2 = (y - y_c) / (y_d - y_c) if y_c != y_d else 1
sx = round(self._interpolate(x_a, x_b, gradient1))
ex = round(self._interpolate(x_c, x_d, gradient2))
z1 = self._interpolate(z_a, z_b, gradient1)
z2 = self._interpolate(z_c, z_d, gradient2)
for x in range(sx, ex):
gradient = (x - sx) / (ex - sx)
z = self._interpolate(z1, z2, gradient)
self.draw_point(x, y, z, color=color)
def _interpolate(self, minimum, maximum, gradient):
return minimum + (maximum - minimum) * self._clamp(gradient)
def _clamp(self, value):
return max(0, min(value, 1))
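# --- Hedged usage sketch (editor addition, not from the original file) ---
# Canvas only reads renderer.size in __init__, so any object exposing a
# (width, height) tuple works as a stand-in renderer here; the .color
# attribute on CanvasPoint is an assumption (only .z_index appears above).
#
# class _StubRenderer:
#     size = (64, 48)
# canvas = Canvas(_StubRenderer())
# canvas.draw_point(10, 10, 0.5, color=(255, 0, 0, 255))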
| 40.727273
| 107
| 0.589286
|
f843963a7085dd8fc6f1370447ee6e3745d6ff5b
| 3,027
|
py
|
Python
|
kale/prepdata/tensor_reshape.py
|
Sheffield-TALE/pykale
|
a28bfc4c444c945bf6820e6b558dc5db0fcb4083
|
[
"MIT"
] | null | null | null |
kale/prepdata/tensor_reshape.py
|
Sheffield-TALE/pykale
|
a28bfc4c444c945bf6820e6b558dc5db0fcb4083
|
[
"MIT"
] | null | null | null |
kale/prepdata/tensor_reshape.py
|
Sheffield-TALE/pykale
|
a28bfc4c444c945bf6820e6b558dc5db0fcb4083
|
[
"MIT"
] | null | null | null |
import torch
# dimension locations in a typical image batch tensor
SPATIAL_BATCH_DIMENSION = 0
SPATIAL_CHANNEL_DIMENSION = 1
SPATIAL_HEIGHT_DIMENSION = 2
SPATIAL_WIDTH_DIMENSION = 3
NUMBER_OF_DIMENSIONS = 4
def spatial_to_seq(image_tensor: torch.Tensor):
"""
Takes a torch tensor of shape (batch_size, channels, height, width)
as used and outputted by CNNs and creates a sequence view of shape
(sequence_length, batch_size, channels) as required by
torch's transformer module. In other words, unrolls the
spatial grid into the sequence length and rearranges the
dimension ordering.
Args:
image_tensor: tensor of shape (batch_size, channels, height, width) (required).
"""
original_size = image_tensor.size()
batch_size = original_size[SPATIAL_BATCH_DIMENSION]
num_channels = original_size[SPATIAL_CHANNEL_DIMENSION]
spatial_height = original_size[SPATIAL_HEIGHT_DIMENSION]
spatial_width = original_size[SPATIAL_WIDTH_DIMENSION]
permuted_tensor = image_tensor.permute(SPATIAL_HEIGHT_DIMENSION, \
SPATIAL_WIDTH_DIMENSION, \
SPATIAL_BATCH_DIMENSION, \
SPATIAL_CHANNEL_DIMENSION)
sequence_tensor = permuted_tensor.view(spatial_height*spatial_width, \
batch_size, num_channels)
return sequence_tensor
# dimension locations in a typical Transformer sequence batch tensor
SEQUENCE_LENGTH_DIMENSION = 0
SEQUENCE_BATCH_DIMENSION = 1
SEQUENCE_FEATURE_DIMENSION = 2
SEQUENCE_NUMBER_OF_DIMENSIONS = 3
def seq_to_spatial(sequence_tensor: torch.Tensor, desired_height: int, desired_width: int):
"""Takes a torch tensor of shape (sequence_length, batch_size, num_features)
as used and outputted by Transformers and creates a view of shape
(batch_size, num_features, height, width) as used and outputted by CNNs.
In other words, rearranges the dimension ordering and rolls
sequence_length into (height,width). height*width must equal
the sequence length of the input sequence.
Args:
sequence_tensor: sequence tensor of shape (sequence_length, batch_size, num_features) (required).
desired_height: the height into which the sequence length should be rolled into (required).
desired_width: the width into which the sequence length should be rolled into (required).
"""
original_size = sequence_tensor.size()
batch_size = original_size[SEQUENCE_BATCH_DIMENSION]
num_channels = original_size[SEQUENCE_FEATURE_DIMENSION]
permuted_tensor = sequence_tensor.permute(SEQUENCE_BATCH_DIMENSION, \
SEQUENCE_FEATURE_DIMENSION, \
SEQUENCE_LENGTH_DIMENSION)
spatial_tensor = permuted_tensor.view(batch_size, num_channels, \
desired_height, desired_width)
return spatial_tensor
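# --- Hedged usage sketch (editor addition, not from the original file) ---
# Round trip on a dummy CNN feature map: (batch, channels, height, width) is
# unrolled to (height*width, batch, channels) and then restored.
_images = torch.randn(2, 64, 7, 7)
_seq = spatial_to_seq(_images)  # shape: (49, 2, 64)
_restored = seq_to_spatial(_seq, 7, 7)  # shape: (2, 64, 7, 7)
assert torch.equal(_images, _restored)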
| 40.36
| 105
| 0.702676
|
b70563e1fba3f19a2a17c745771dad8abea2795f
| 4,512
|
py
|
Python
|
ml/ml_recognizer.py
|
andresodio/lager
|
9a7e17113bfbc11cf4f734511a4c5e0cacbf2ff2
|
[
"MIT"
] | null | null | null |
ml/ml_recognizer.py
|
andresodio/lager
|
9a7e17113bfbc11cf4f734511a4c5e0cacbf2ff2
|
[
"MIT"
] | null | null | null |
ml/ml_recognizer.py
|
andresodio/lager
|
9a7e17113bfbc11cf4f734511a4c5e0cacbf2ff2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#@title Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#@title MIT License
#
# Copyright (c) 2017 François Chollet
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# Machine-learning based LaGeR recognizer using TensorFlow DNN classifier
# Partially based on:
# https://github.com/tensorflow/models/blob/master/samples/core/tutorials/keras/basic_classification.ipynb
# https://colab.research.google.com/notebooks/mlcc/multi-class_classification_of_handwritten_digits.ipynb
# TensorFlow and tf.keras
import tensorflow as tf
from tensorflow import keras
# Helper libraries
import os
import numpy as np
import pandas as pd
from skimage.transform import resize
import sys
import time
# Custom libraries
from lager_ml_common import _GESTURE_LIST, _NUM_CLASSES, _NUM_FEATURES, _MAX_FEATURE_VALUE, convert_lager_to_numbers, expand_gesture_num_to_target
class_names = _GESTURE_LIST
model = keras.models.load_model(os.environ["HOME"] + '/lager_model.h5') # loads the model from an HDF5 file
# Call classifier with dummy prediction to speed up subsequent calls
# The number of zeroes in the dummy is twice the number of features because
# we have 2 dimensions (2 sensors per movement).
dummy_sample = np.array([np.zeros(_NUM_FEATURES*2)],dtype=np.float32)
dummy_sample = dummy_sample.reshape(1,_NUM_FEATURES,2)
predictions_single = model.predict(dummy_sample)
def main(input_gesture = ""):
while(True):
single_gesture = 0
if (len(input_gesture) > 0):
single_gesture = 1
if (not input_gesture[0].isdigit()):
input_gesture = convert_lager_to_numbers(input_gesture)
elif (len(sys.argv) == 2):
single_gesture = 1
input_gesture = sys.argv[1]
if (not input_gesture[0].isdigit()):
input_gesture = convert_lager_to_numbers(input_gesture)
else:
print("Enter gesture values:")
input_gesture = input()
        before_time = time.perf_counter()  # time.clock() was removed in Python 3.8
gesture_values = [int(e) for e in input_gesture.strip().split(',')]
gesture_values = np.array([gesture_values],dtype=np.uint32)
gesture_values = gesture_values / _MAX_FEATURE_VALUE
gesture_values.shape = (1, len(gesture_values[0]) // 2, 2)
new_samples = resize(gesture_values, (1,_NUM_FEATURES, 2), anti_aliasing=False, order=0, mode='edge')
prediction = model.predict(new_samples)
        after_time = time.perf_counter()
print("Probabilities")
print("-------------")
class_label = 0
for number in prediction[0]:
print('{:<1} {:<15} {:<1} {:>5} {:<1}'.format(" ", _GESTURE_LIST[class_label], ": ", "%.2f" % (number * 100), "%"))
class_label += 1
print("")
class_label = np.argmax(prediction[0])
probability = np.max(prediction[0])
probability = round(probability * 100, 2)
elapsed_time = int(round((after_time-before_time)*1000))
#print("Classified gesture: ", _GESTURE_LIST[class_label])
#print("Probability: ", probability, "%")
#print("Elapsed time: ", elapsed_time, "ms")
if single_gesture:
return class_label, probability, elapsed_time
if __name__ == "__main__":
main("")
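# Hedged usage note (illustrative; the gesture string below is a made-up
# placeholder). When a gesture string is passed in, main() returns a
# (class_label, probability, elapsed_time) tuple, so another module could do:
#
#     label, prob, ms = main("1,2,3,4,5,6")
#     print(_GESTURE_LIST[label], prob, "%", ms, "ms")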
| 37.915966
| 146
| 0.749778
|
c0754a0c945cde0cca7a08ce8dbe42e8dcbccd9a
| 4,339
|
py
|
Python
|
pds_pipelines/available_modules.py
|
robotprogrammer22/PDS-Pipelines
|
a7f53122efadf18462696d78253106c1bc03c015
|
[
"Unlicense"
] | null | null | null |
pds_pipelines/available_modules.py
|
robotprogrammer22/PDS-Pipelines
|
a7f53122efadf18462696d78253106c1bc03c015
|
[
"Unlicense"
] | null | null | null |
pds_pipelines/available_modules.py
|
robotprogrammer22/PDS-Pipelines
|
a7f53122efadf18462696d78253106c1bc03c015
|
[
"Unlicense"
] | null | null | null |
import warnings
from os import rename
available_modules = {}
try:
from pysis import isis
available_modules['isis'] = isis
except Exception as e:
warnings.warn('Unable to add isis to the available modules. ' +
f'Failed with the following {e}.')
try:
import gdal
available_modules['gdal'] = gdal
# Python GDAL doesn't use exceptions by default.
gdal.UseExceptions()
except Exception as e:
warnings.warn('Unable to add gdal to the available modules. ' +
f'Failed with the following {e}.')
try:
import ogr
available_modules['ogr'] = ogr
except Exception as e:
warnings.warn('Unable to add ogr to the available modules. ' +
f'Failed with the following {e}.')
def gdal_translate(dest, src, *args, **kwargs):
try:
# If outputType is specified, convert it to gdal datatype
kwargs['outputType'] = gdal.GetDataTypeByName(kwargs['outputType'])
except KeyError:
# If outputType not specified, no conversion is necessary and GDAL will
# use default arguments.
pass
opts = gdal.TranslateOptions(*args, **kwargs)
return gdal.Translate(dest, src, options=opts)
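# Hedged usage sketch (illustrative; the file names are hypothetical).
# gdal_translate forwards any gdal.TranslateOptions keyword, and 'outputType'
# may be passed as a string such as "Byte" because the wrapper converts it
# with gdal.GetDataTypeByName before building the options.
def _example_gdal_translate():
    return gdal_translate('out.tif', 'in.cub', format='GTiff', outputType='Byte')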
def gdal_polygonize(input_file, output_name, mask='default', *args, **kwargs):
driver = ogr.GetDriverByName("ESRI Shapefile")
src_ds = gdal.Open(input_file)
src_band = src_ds.GetRasterBand(1)
if mask == 'default':
mask_band = src_band.GetMaskBand()
elif mask.lower() == 'none':
mask_band = None
else:
mask_ds = gdal.Open(mask)
mask_band = mask_ds.GetRasterBand(1)
srs = src_ds.GetSpatialRef()
output_datasource = driver.CreateDataSource(output_name)
out_layer = output_datasource.CreateLayer(output_name, srs=srs)
field = ogr.FieldDefn('DN', ogr.OFTInteger)
out_layer.CreateField(field)
field_id = out_layer.GetLayerDefn().GetFieldIndex('DN')
return gdal.Polygonize(src_band, mask_band, out_layer, field_id, [], **kwargs)
def ogr2ogr(dest, src, *args, **kwargs):
srcDS = gdal.OpenEx(src)
opts = gdal.VectorTranslateOptions(skipFailures=True, *args, **kwargs)
ds = gdal.VectorTranslate(dest, srcDS=srcDS, options = opts)
# Dataset isn't written until dataset is closed and dereferenced
# https://gis.stackexchange.com/questions/255586/gdal-vectortranslate-returns-empty-object
del ds
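# Hedged usage sketch (illustrative; the paths are hypothetical). Because the
# wrapper dereferences the returned dataset itself, callers only pass source
# and destination paths plus optional gdal.VectorTranslateOptions keywords:
def _example_ogr2ogr():
    ogr2ogr('out.shp', 'in.geojson', format='ESRI Shapefile')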
def get_single_band_cube(cube,out_cube,band_list,keyname):
"""
Convenience function to extract a single band from an ISIS cube based on a prioritized list of band numbers,
and the name of a keyword to search for in the BandBin group of the cube.
This is necessary for generating browse/thumbnail images from multiband images where certain bands are preferred
over others for use in the output, but there is no way of knowing whether the preferred band is present without
inspecting the ingested ISIS cube.
Parameters
----------
cube : str
A string file path to the input cube.
out_cube : str
A string file path to the desired output cube.
band_list : list
A list of ints representing band numbers to search for in cube, in decreasing order of priority
keyname : str
The name of the keyword to look for in the BandBin group of the input cube.
Returns
-------
    None
        Calls the ISIS cubeatt function as a side effect to write out a single-band cube
"""
bands_in_cube = isis.getkey(from_=cube, objname="IsisCube", grpname="BandBin", keyword=keyname)
if isinstance(bands_in_cube, bytes):
bands_in_cube = bands_in_cube.decode()
bands_in_cube = bands_in_cube.replace('\n', '').replace(' ', '').split(',')
bands_in_cube = [int(x) for x in bands_in_cube]
for band in band_list:
if band in bands_in_cube:
isis.cubeatt(from_=cube + '+' + str(band), to=out_cube)
break
else:
continue
return
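# Hedged usage sketch (illustrative; the cube paths, band priorities and the
# BandBin keyword name are hypothetical). The first band from the priority
# list that is present in the ingested cube gets written out:
def _example_get_single_band_cube():
    get_single_band_cube('input.cub', 'single_band.cub',
                         band_list=[5, 4, 3], keyname='FilterNumber')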
def cube_rename(src, dest):
""" Thin wrapper to make os.rename available in recipes.
Parameters
----------
src : str
A string file path to the file that will be renamed.
dest : str
A string file path that serves as the new file path.
"""
rename(src, dest)
return
| 33.376923
| 116
| 0.666974
|
0efeaf832869a3f6e5eedf5ebdbd98b181d7343d
| 403
|
py
|
Python
|
server/utils/global_utils.py
|
LiteralGenie/HvLog
|
8bb9a68e8b6fdd447dcd2f7bc9870ebc1dfb0916
|
[
"MIT"
] | null | null | null |
server/utils/global_utils.py
|
LiteralGenie/HvLog
|
8bb9a68e8b6fdd447dcd2f7bc9870ebc1dfb0916
|
[
"MIT"
] | null | null | null |
server/utils/global_utils.py
|
LiteralGenie/HvLog
|
8bb9a68e8b6fdd447dcd2f7bc9870ebc1dfb0916
|
[
"MIT"
] | null | null | null |
from os.path import dirname, sep
SRC_DIR= dirname(dirname(__file__)) + sep
PROJ_DIR= dirname(dirname(SRC_DIR)) + sep
DATA_DIR= PROJ_DIR + "data" + sep
CONFIG_DIR= PROJ_DIR + "config" + sep
LOG_DIR= PROJ_DIR + "logs" + sep
LOGGING_CONFIG= CONFIG_DIR + "logging.yaml"
CONFIG_FILE= CONFIG_DIR + "config.yaml"
# DATABASE_FILE= DATA_DIR + "db.fs"
DATABASE_FILE= PROJ_DIR + 'scratch/db_data/test_db_1.fs'
| 26.866667
| 56
| 0.741935
|
0032c03c0f0d6866990980ceb22a9ac7c930bbd7
| 549
|
py
|
Python
|
sent-al/modules/forms.py
|
sharad461/sentiment-analysis
|
6ae9efbe758afbe127fe4c63d4062c656d1d99bd
|
[
"MIT"
] | null | null | null |
sent-al/modules/forms.py
|
sharad461/sentiment-analysis
|
6ae9efbe758afbe127fe4c63d4062c656d1d99bd
|
[
"MIT"
] | null | null | null |
sent-al/modules/forms.py
|
sharad461/sentiment-analysis
|
6ae9efbe758afbe127fe4c63d4062c656d1d99bd
|
[
"MIT"
] | 1
|
2021-09-18T08:48:57.000Z
|
2021-09-18T08:48:57.000Z
|
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, TextAreaField
from wtforms.validators import Length, DataRequired
class KeywordSearch(FlaskForm):
keyword = StringField(validators = [DataRequired(),Length(min=2, max=20)])
submit = SubmitField('Search Tweets')
class ProfileSearch(FlaskForm):
searchTerm = StringField(validators=[DataRequired(), Length(min=2, max=20)])
submit = SubmitField('Go')
class TextClassification(FlaskForm):
text = TextAreaField(validators = [DataRequired()])
submit = SubmitField("Go")
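# Hedged usage sketch (illustrative; the route, template and do_search helper
# are hypothetical). A Flask view would typically instantiate one of these
# forms, validate it on POST and read the submitted value:
#
#     @app.route('/search', methods=['GET', 'POST'])
#     def search():
#         form = KeywordSearch()
#         if form.validate_on_submit():
#             return do_search(form.keyword.data)
#         return render_template('search.html', form=form)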
| 36.6
| 77
| 0.783242
|
8013554950b9871855ce42b561ec766e31fb737c
| 15,318
|
py
|
Python
|
prody/dynamics/adaptive.py
|
kaynakb/ProDy
|
4366ad28142f51ff8a84f8a0f4ce659c0b949d55
|
[
"MIT"
] | 210
|
2015-01-26T08:17:56.000Z
|
2022-03-30T01:40:34.000Z
|
prody/dynamics/adaptive.py
|
kaynakb/ProDy
|
4366ad28142f51ff8a84f8a0f4ce659c0b949d55
|
[
"MIT"
] | 555
|
2015-01-05T21:51:54.000Z
|
2022-03-31T16:51:41.000Z
|
prody/dynamics/adaptive.py
|
kaynakb/ProDy
|
4366ad28142f51ff8a84f8a0f4ce659c0b949d55
|
[
"MIT"
] | 99
|
2015-02-09T18:00:39.000Z
|
2022-03-07T12:52:51.000Z
|
# -*- coding: utf-8 -*-
"""This module defines functions for performing adaptive ANM."""
from prody.atomic import Atomic, AtomMap
import time
from numbers import Integral, Number
import numpy as np
from prody import LOGGER
from prody.utilities import getCoords, importLA
from prody.measure import calcRMSD, calcDistance, superpose
from prody.ensemble import Ensemble
from .functions import calcENM
from .modeset import ModeSet
__all__ = ['calcAdaptiveANM', 'AANM_ONEWAY', 'AANM_ALTERNATING', 'AANM_BOTHWAYS', 'AANM_DEFAULT']
AANM_ALTERNATING = 0
AANM_ONEWAY = 1
AANM_BOTHWAYS = 2
AANM_DEFAULT = AANM_ALTERNATING
norm = importLA().norm
def checkInput(a, b, **kwargs):
coordsA = getCoords(a)
if isinstance(a, Atomic):
title = a.getTitle()
atoms = a
else:
title = None
atoms = None
coordsB = getCoords(b)
if title is None:
if isinstance(b, Atomic):
title = b.getTitle()
atoms = b
else:
title = 'Unknown'
atoms = None
maskA = a.getFlags("mapped") if isinstance(a, AtomMap) else 1.
maskB = b.getFlags("mapped") if isinstance(b, AtomMap) else 1.
weights = maskA * maskB
if np.isscalar(weights):
weights = None
if np.isscalar(maskA):
maskA = None
if np.isscalar(maskB):
maskB = None
aligned = kwargs.get('aligned', False)
if not aligned:
coordsA, _ = superpose(coordsA, coordsB, weights)
rmsd = calcRMSD(coordsA, coordsB, weights)
LOGGER.info('Initialized Adaptive ANM with RMSD {:4.3f}\n'.format(rmsd))
return coordsA, coordsB, title, atoms, weights, maskA, maskB, rmsd
def getTitle(structure, def_title='structure'):
if isinstance(structure, Atomic):
title = structure.getTitle()
else:
title = def_title
return title
def calcStep(initial, target, n_modes, ensemble, defvecs, rmsds, mask=None, callback_func=None, **kwargs):
"""Runs a single step of adaptive ANM.
Modes will be calculated for *initial* with a square cumulative overlap above a threshold defined by
*Fmin* and used for transitioning towards *target*.
"""
Fmin = kwargs.get('Fmin', None)
f = kwargs.get('f', 0.2)
Fmin_max = kwargs.get('Fmin_max', 0.6)
resetFmin = kwargs.get('resetFmin', False)
weights = ensemble.getWeights()
if weights is not None:
weights = weights.flatten()
#coords_init, _ = superpose(initial, target, weights) # we should keep this off otherwise RMSD calculations are off
coords_init = initial
coords_tar = target
dof = coords_init.shape[0] - 6
n_max_modes = kwargs.get('n_max_modes', None)
if n_max_modes is None:
n_max_modes = dof
if n_max_modes < 1:
n_max_modes = int(n_max_modes * dof)
if n_max_modes > dof:
n_max_modes = dof
if n_modes > n_max_modes:
n_modes = n_max_modes
model = kwargs.pop('model', 'anm')
anm, _ = calcENM(coords_init, select=mask, mask=mask,
model=model, trim='trim', n_modes=n_modes,
**kwargs)
if mask is not None:
anm.masked = False
defvec = coords_tar - coords_init
d = defvec.flatten()
if weights is not None:
d *= weights.repeat(3)
defvecs.append(d)
if Fmin is None:
if resetFmin:
Fmin = 0. # Select the first mode only
else:
Fmin = 1 - np.sqrt(norm(defvecs[-1])/norm(defvecs[0]))
if Fmin > Fmin_max:
Fmin = Fmin_max
overlaps = np.dot(d, anm.getEigvecs())
normalised_overlaps = overlaps / norm(d)
c_sq = np.cumsum(np.power(normalised_overlaps, 2), axis=0)
if Fmin == 0 and resetFmin:
torf_Fmin = np.zeros(c_sq.shape, dtype=bool)
argmax_overlap = np.argmax(abs(normalised_overlaps))
torf_Fmin[argmax_overlap] = True
else:
torf_Fmin = c_sq <= Fmin
if np.any(torf_Fmin) and not np.all(torf_Fmin):
i = np.where(torf_Fmin)[0].max()
torf_Fmin[i+1] = True
if not np.any(torf_Fmin):
torf_Fmin[0] = True
selected_mode_indices = np.arange(anm.numModes())[torf_Fmin]
n_sel_modes = len(selected_mode_indices)
modes = ModeSet(anm, selected_mode_indices)
c_sq_crit = c_sq[torf_Fmin].max()
if n_sel_modes == 1:
LOGGER.info('Using 1 mode with square overlap {0}'
.format('%4.3f'%c_sq_crit))
else:
LOGGER.info('Using {0} modes with square cumulative overlap {1}'
.format(n_sel_modes, '%4.3f'%c_sq_crit))
if n_sel_modes > n_modes-5:
n_modes *= 2
if n_modes > dof:
n_modes = dof
v = modes.getEigvecs().dot(overlaps[torf_Fmin])
s = f * v.dot(d) / v.dot(v)
# update coords_init
coords_init += s * v.reshape(coords_init.shape)
# initial[:] = coords_init[:] # turn this on in case coords_init is not initial in the future
rmsd = calcRMSD(coords_init, coords_tar, weights)
rmsds.append(rmsd)
if callback_func is not None:
cbkwargs = {'init': coords_init,
'tar': coords_tar,
'modes': modes,
'defvec': d,
'c_sq': c_sq_crit,
'rmsd': rmsd}
callback_func(**cbkwargs)
# deposit
ensemble.addCoordset(coords_init.copy())
converged = checkConvergence(rmsds, coords_init, **kwargs)
if converged:
n_modes = 0
LOGGER.info('Current RMSD is {:4.3f}\n'.format(rmsd))
return n_modes
def checkConvergence(rmsds, coords, **kwargs):
"""Check convergence of adaptive ANM.
Convergence is reached if one of three conditions is met:
    1. The RMSD decrease over each of the last four steps is below *min_rmsd_diff*
    2. The current RMSD falls below *target_rmsd*
3. A node in *coords* gets disconnected from another by > *cutoff*
"""
min_rmsd_diff = kwargs.get('min_rmsd_diff', 0.05)
target_rmsd = kwargs.get('target_rmsd', 1.0)
cutoff = kwargs.get('cutoff', 15)
if len(rmsds) > 4:
drmsd = np.abs(np.diff(rmsds))
if np.all(drmsd[-4:] < min_rmsd_diff):
LOGGER.warn(
'The RMSD decrease fell below {0}'.format(min_rmsd_diff))
return True
if rmsds[-1] < target_rmsd:
LOGGER.warn('The RMSD fell below target RMSD {0}'.format(target_rmsd))
return True
if checkDisconnection(coords, cutoff):
        LOGGER.warn('Disconnections were found in one of the structures')
return True
return False
def checkDisconnection(coords, cutoff):
"""Check disconnection of ANM, i.e. a node in *coords* gets
disconnected from another by > *cutoff*. This is one of the
stopping criteria for adaptive ANM.
"""
all_dists = np.array([calcDistance(coords, entry) for entry in coords])
min_dists = np.array([np.min([np.min(all_dists[i, :i]), np.min(all_dists[i, i+1:])])
for i in range(1, coords.shape[0]-1)])
if max(min_dists) > cutoff:
LOGGER.warn('A bead has become disconnected. '
'Adaptive ANM cannot proceed without unrealistic deformations')
return True
return False
def calcAdaptiveANM(a, b, n_steps, mode=AANM_DEFAULT, **kwargs):
"""Runs adaptive ANM analysis of proteins ([ZY09]_) that creates a path that
connects two conformations using normal modes.
This function can be run in three modes:
1. *AANM_ONEWAY*: all steps are run in one direction: from *a* to *b*.
2. *AANM_ALTERNATING*: steps are run in alternating directions: from *a* to *b*,
then *b* to *a*, then back again, and so on.
3. *AANM_BOTHWAYS*: steps are run in one direction (from *a* to
*b*) until convergence is reached and then the other way.
    This implementation also differs from the original one in that it sorts the
modes by overlap prior to cumulative overlap calculations for efficiency.
.. [ZY09] Zheng Yang, Peter Májek, Ivet Bahar. Allosteric Transitions of
Supramolecular Systems Explored by Network Models: Application to
Chaperonin GroEL. *PLOS Comp Biol* **2009** 40:512-524.
:arg a: structure A for the transition
:type a: :class:`.Atomic`, :class:`~numpy.ndarray`
:arg b: structure B for the transition
:type b: :class:`.Atomic`, :class:`~numpy.ndarray`
:arg n_steps: the maximum number of steps to be calculated. For *AANM_BOTHWAYS*,
this means the maximum number of steps from each direction
:type n_steps: int
:arg mode: the way of the calculation to be performed, which can be either *AANM_ONEWAY*,
*AANM_ALTERNATING*, or *AANM_BOTHWAYS*. Default is *AANM_ALTERNATING*
:type mode: int
:kwarg f: step size. Default is 0.2
:type f: float
:kwarg Fmin: cutoff for selecting modes based on square cumulative overlaps
Default is **None**, which automatically determines and adapts *Fmin* on the fly.
:type Fmin: float
:kwarg Fmin_max: maximum value for *Fmin* when it is automatically determined
Default is 0.6
:type Fmin_max: float
:arg min_rmsd_diff: cutoff for rmsds converging. Default is 0.05
:type min_rmsd_diff: float
:kwarg target_rmsd: target rmsd for stopping. Default is 1.0
:type target_rmsd: float
:kwarg n_modes: the number of modes to be calculated for the first run. *n_modes*
will be dynamically adjusted later as the calculation progresses. Default is 20
:type n_modes: int
:kwarg n_max_modes: the maximum number of modes to be calculated in each run.
Default is **None**, which allows as many as degree of freedom
:type n_max_modes: int
:kwarg callback_func: a callback function that can be used to collect quantities
from each iteration. The function must accept `**kwargs` as its only input.
Keywords in `kwargs` are:
'init': the initial coordinate;
'tar': the target coordinate;
'modes': a :class:`.ModeSet` of selected modes;
'defvec': the deformation vector;
'c_sq': the critical square cumulative overlap;
'rmsd': the RMSD between the two structures after the deformation.
:type callback_func: func
Please see keyword arguments for calculating the modes in :func:`.calcENM`.
"""
if mode == AANM_ONEWAY:
return calcOneWayAdaptiveANM(a, b, n_steps, **kwargs)
elif mode == AANM_ALTERNATING:
return calcAlternatingAdaptiveANM(a, b, n_steps, **kwargs)
elif mode == AANM_BOTHWAYS:
return calcBothWaysAdaptiveANM(a, b, n_steps, **kwargs)
else:
raise ValueError('unknown aANM mode: %d'%mode)
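# Hedged usage sketch (illustrative; the inputs are placeholders). The two
# conformations must contain the same number of atoms/coordinates, and the
# call returns an Ensemble holding the intermediate conformations:
def _example_calcAdaptiveANM(coordsA, coordsB):
    return calcAdaptiveANM(coordsA, coordsB, n_steps=20, mode=AANM_BOTHWAYS)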
def calcOneWayAdaptiveANM(a, b, n_steps, **kwargs):
    """Runs one-way adaptive ANM. """
n_modes = kwargs.pop('n_modes', 20)
coordsA, coordsB, title, atoms, weights, maskA, maskB, rmsd = checkInput(a, b, **kwargs)
coordsA = coordsA.copy()
LOGGER.timeit('_prody_calcAdaptiveANM')
n = 0
resetFmin = True
defvecs = []
rmsds = [rmsd]
ensemble = Ensemble(title + '_aANM')
ensemble.setAtoms(atoms)
ensemble.setCoords(coordsB)
ensemble.setWeights(weights)
ensemble.addCoordset(coordsA.copy())
while n < n_steps:
LOGGER.info('\nStarting cycle {0} with initial structure {1}'.format(n+1, title))
n_modes = calcStep(coordsA, coordsB, n_modes, ensemble, defvecs, rmsds, mask=maskA,
resetFmin=resetFmin, **kwargs)
n += 1
resetFmin = False
if n_modes == 0:
LOGGER.report('One-way Adaptive ANM converged in %.2fs.', '_prody_calcAdaptiveANM')
break
return ensemble
def calcAlternatingAdaptiveANM(a, b, n_steps, **kwargs):
    """Runs alternating adaptive ANM. """
n_modes = kwargs.pop('n_modes', 20)
coordsA, coordsB, title, atoms, weights, maskA, maskB, rmsd = checkInput(a, b, **kwargs)
coordsA = coordsA.copy()
coordsB = coordsB.copy()
LOGGER.timeit('_prody_calcAdaptiveANM')
n = 0
resetFmin = True
defvecs = []
rmsds = [rmsd]
ensA = Ensemble('A')
ensA.setCoords(coordsA)
ensA.setWeights(weights)
ensA.addCoordset(coordsA.copy())
ensB = Ensemble('B')
ensB.setCoords(coordsB.copy())
ensB.setWeights(weights)
ensB.addCoordset(coordsB.copy())
while n < n_steps:
LOGGER.info('\nStarting cycle {0} with {1}'.format(n + 1, getTitle(a, 'structure A')))
n_modes = calcStep(coordsA, coordsB, n_modes, ensA, defvecs, rmsds, mask=maskA,
resetFmin=resetFmin, **kwargs)
resetFmin = False
if n_modes == 0:
LOGGER.report('Alternating Adaptive ANM converged in %.2fs.', '_prody_calcAdaptiveANM')
break
LOGGER.info('\nContinuing cycle {0} with structure {1}'.format(n+1, getTitle(b, 'structure B')))
n_modes = calcStep(coordsB, coordsA, n_modes, ensB, defvecs, rmsds, mask=maskB,
resetFmin=resetFmin, **kwargs)
n += 1
if n_modes == 0:
LOGGER.report('Alternating Adaptive ANM converged in %.2fs.', '_prody_calcAdaptiveANM')
break
ensemble = ensA + ensB[::-1]
ensemble.setTitle(title + '_aANM')
ensemble.setAtoms(atoms)
ensemble.setCoords(ensB.getCoords())
return ensemble
def calcBothWaysAdaptiveANM(a, b, n_steps, **kwargs):
    """Runs both-way adaptive ANM. """
n_modes0 = n_modes = kwargs.pop('n_modes', 20)
coordsA, coordsB, title, atoms, weights, maskA, maskB, rmsd = checkInput(a, b, **kwargs)
coordsA = coordsA.copy()
coordsB = coordsB.copy()
LOGGER.timeit('_prody_calcAdaptiveANM')
n = 0
resetFmin = True
defvecs = []
rmsds = [rmsd]
ensA = Ensemble('A')
ensA.setCoords(coordsA)
ensA.setWeights(weights)
ensA.addCoordset(coordsA.copy())
ensB = Ensemble('B')
ensB.setCoords(coordsB.copy())
ensB.setWeights(weights)
ensB.addCoordset(coordsB.copy())
while n < n_steps:
LOGGER.info('\nStarting cycle {0} with {1}'.format(n + 1, getTitle(a, 'structure A')))
n_modes = calcStep(coordsA, coordsB, n_modes, ensA, defvecs, rmsds, mask=maskA,
resetFmin=resetFmin, **kwargs)
n += 1
resetFmin = False
if n_modes == 0:
break
n = 0
n_modes = n_modes0
resetFmin = True
while n < n_steps:
LOGGER.info('\nStarting cycle {0} with structure {1}'.format(n+1, getTitle(b, 'structure B')))
n_modes = calcStep(coordsB, coordsA, n_modes, ensB, defvecs, rmsds, mask=maskB,
resetFmin=resetFmin, **kwargs)
n += 1
resetFmin = False
if n_modes == 0:
            LOGGER.report('Both-way Adaptive ANM converged in %.2fs.', '_prody_calcAdaptiveANM')
break
ensemble = ensA + ensB[::-1]
ensemble.setTitle(title + '_aANM')
ensemble.setAtoms(atoms)
ensemble.setCoords(ensB.getCoords())
LOGGER.report('Both-way Adaptive ANM converged in %.2fs.', '_prody_calcAdaptiveANM')
return ensemble
| 32.800857
| 149
| 0.632524
|
c0794973537ec73c95c1e56c344c2b4292a0e252
| 1,159
|
py
|
Python
|
django_form_wizard_lab/subject/migrations/0001_initial.py
|
muhyasin89/django_framework
|
13979f9c6687f5c11ddbf3b44dd6e27a68410021
|
[
"MIT"
] | 1
|
2020-07-06T07:21:21.000Z
|
2020-07-06T07:21:21.000Z
|
django_form_wizard_lab/subject/migrations/0001_initial.py
|
muhyasin89/django_framework
|
13979f9c6687f5c11ddbf3b44dd6e27a68410021
|
[
"MIT"
] | null | null | null |
django_form_wizard_lab/subject/migrations/0001_initial.py
|
muhyasin89/django_framework
|
13979f9c6687f5c11ddbf3b44dd6e27a68410021
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.5 on 2020-06-17 04:40
import autoslug.fields
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Subject',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('name', models.CharField(max_length=255, verbose_name='Subject')),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='name')),
('number_sks', models.IntegerField()),
],
options={
'verbose_name': 'subject',
'verbose_name_plural': 'subjects',
},
),
]
| 35.121212
| 147
| 0.6195
|
f4857e131f853cd7f04b24fe180df74493186b03
| 1,595
|
py
|
Python
|
Hackerrank Practice/Find a String.py
|
falconcode16/pythonprogramming
|
fc53a879be473ebceb1d7da061b0e8fc2a20706c
|
[
"MIT"
] | 2
|
2020-04-11T14:15:10.000Z
|
2020-05-12T09:57:29.000Z
|
Hackerrank Practice/Find a String.py
|
falconcode16/pythonprogramming
|
fc53a879be473ebceb1d7da061b0e8fc2a20706c
|
[
"MIT"
] | null | null | null |
Hackerrank Practice/Find a String.py
|
falconcode16/pythonprogramming
|
fc53a879be473ebceb1d7da061b0e8fc2a20706c
|
[
"MIT"
] | 1
|
2021-10-10T02:13:42.000Z
|
2021-10-10T02:13:42.000Z
|
'''
In this challenge, the user enters a string and a substring. You have to print the number of times that the substring occurs in the given string. String traversal will take place from left to right, not from right to left.
NOTE: String letters are case-sensitive.
Input Format
The first line of input contains the original string. The next line contains the substring.
Constraints
Each character in the string is an ascii character.
Output Format
Output the integer number indicating the total number of occurrences of the substring in the original string.
Sample Input
ABCDCDC
CDC
Sample Output
2
Concept
Some string processing examples, such as these, might be useful.
There are a couple of new concepts:
In Python, the length of a string is found by the function len(s), where s is the string.
To traverse through the length of a string, use a for loop:
for i in range(0, len(s)):
print (s[i])
A range function is used to loop over some length:
range (0, 5)
Here, the range loops over 0 to 4; 5 is excluded.
'''
def count_substring(string, sub_string):
c = 0
for i in range(0, len(string)):
slice_object = slice(i, len(sub_string)+i)
count = len(string[slice_object])
if(count == len(sub_string)):
if(sub_string == string[slice_object]):
c = c + 1
return c
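# Hedged check (illustrative): with the sample input from the prompt the
# function should report 2 overlapping occurrences; an equivalent sliding
# slice comparison is shown alongside it.
#
#     assert count_substring("ABCDCDC", "CDC") == 2
#     assert sum("ABCDCDC"[i:i + 3] == "CDC" for i in range(len("ABCDCDC") - 2)) == 2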
if __name__ == '__main__':
string = input().strip()
sub_string = input().strip()
count = count_substring(string, sub_string)
print(count)
| 26.147541
| 223
| 0.667085
|
f78d0359c50049baa345eb4dc837612798e0c2e0
| 96,414
|
py
|
Python
|
pysnmp/GSM7224-SWITCHING-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/GSM7224-SWITCHING-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/GSM7224-SWITCHING-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module GSM7224-SWITCHING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/GSM7224-SWITCHING-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:06:38 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion")
AgentPortMask, gsm7224 = mibBuilder.importSymbols("GSM7224-REF-MIB", "AgentPortMask", "gsm7224")
IANAifType, = mibBuilder.importSymbols("IANAifType-MIB", "IANAifType")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
dot1qFdbId, VlanIndex, dot1qVlanIndex = mibBuilder.importSymbols("Q-BRIDGE-MIB", "dot1qFdbId", "VlanIndex", "dot1qVlanIndex")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, IpAddress, ObjectIdentity, TimeTicks, ModuleIdentity, Integer32, MibIdentifier, Gauge32, NotificationType, Counter64, Bits, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "IpAddress", "ObjectIdentity", "TimeTicks", "ModuleIdentity", "Integer32", "MibIdentifier", "Gauge32", "NotificationType", "Counter64", "Bits", "Counter32")
PhysAddress, MacAddress, TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "PhysAddress", "MacAddress", "TextualConvention", "RowStatus", "DisplayString")
gsm7224Switching = ModuleIdentity((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1))
gsm7224Switching.setRevisions(('2003-02-06 18:35',))
if mibBuilder.loadTexts: gsm7224Switching.setLastUpdated('200311101200Z')
if mibBuilder.loadTexts: gsm7224Switching.setOrganization('Netgear')
agentInfoGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1))
agentInventoryGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 1))
agentInventorySysDescription = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentInventorySysDescription.setStatus('current')
agentInventoryMachineType = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentInventoryMachineType.setStatus('current')
agentInventoryBurnedInMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 1, 3), PhysAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentInventoryBurnedInMacAddress.setStatus('current')
agentInventoryAdditionalPackages = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentInventoryAdditionalPackages.setStatus('current')
agentInventorySoftwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentInventorySoftwareVersion.setStatus('current')
agentTrapLogGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2))
agentTrapLogTotal = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentTrapLogTotal.setStatus('current')
agentTrapLogTotalSinceLastViewed = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentTrapLogTotalSinceLastViewed.setStatus('deprecated')
agentTrapLogTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2, 4), )
if mibBuilder.loadTexts: agentTrapLogTable.setStatus('current')
agentTrapLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2, 4, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentTrapLogIndex"))
if mibBuilder.loadTexts: agentTrapLogEntry.setStatus('current')
agentTrapLogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentTrapLogIndex.setStatus('current')
agentTrapLogSystemTime = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2, 4, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentTrapLogSystemTime.setStatus('current')
agentTrapLogTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 2, 4, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 512))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentTrapLogTrap.setStatus('current')
agentSupportedMibTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 3), )
if mibBuilder.loadTexts: agentSupportedMibTable.setStatus('current')
agentSupportedMibEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 3, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentSupportedMibIndex"))
if mibBuilder.loadTexts: agentSupportedMibEntry.setStatus('current')
agentSupportedMibIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSupportedMibIndex.setStatus('current')
agentSupportedMibName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 3, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSupportedMibName.setStatus('current')
agentSupportedMibDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 1, 3, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 512))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSupportedMibDescription.setStatus('current')
agentConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2))
agentCLIConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1))
agentLoginSessionTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1), )
if mibBuilder.loadTexts: agentLoginSessionTable.setStatus('current')
agentLoginSessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentLoginSessionIndex"))
if mibBuilder.loadTexts: agentLoginSessionEntry.setStatus('current')
agentLoginSessionIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLoginSessionIndex.setStatus('current')
agentLoginSessionUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLoginSessionUserName.setStatus('current')
agentLoginSessionIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLoginSessionIPAddress.setStatus('current')
agentLoginSessionConnectionType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("serial", 1), ("telnet", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLoginSessionConnectionType.setStatus('current')
agentLoginSessionIdleTime = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1, 5), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLoginSessionIdleTime.setStatus('current')
agentLoginSessionSessionTime = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1, 6), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLoginSessionSessionTime.setStatus('current')
agentLoginSessionStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 1, 1, 7), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLoginSessionStatus.setStatus('current')
agentTelnetConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 2))
agentTelnetLoginTimeout = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 160))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTelnetLoginTimeout.setStatus('current')
agentTelnetMaxSessions = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTelnetMaxSessions.setStatus('current')
agentTelnetAllowNewMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTelnetAllowNewMode.setStatus('current')
agentUserConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3))
agentUserConfigCreate = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserConfigCreate.setStatus('current')
agentUserConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2), )
if mibBuilder.loadTexts: agentUserConfigTable.setStatus('current')
agentUserConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentUserIndex"))
if mibBuilder.loadTexts: agentUserConfigEntry.setStatus('current')
agentUserIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 1), Integer32())
if mibBuilder.loadTexts: agentUserIndex.setStatus('current')
agentUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserName.setStatus('current')
agentUserPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserPassword.setStatus('current')
agentUserAccessMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("read", 1), ("write", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentUserAccessMode.setStatus('current')
agentUserStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserStatus.setStatus('current')
agentUserAuthenticationType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("hmacmd5", 2), ("hmacsha", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserAuthenticationType.setStatus('current')
agentUserEncryptionType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("none", 1), ("des", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserEncryptionType.setStatus('current')
agentUserEncryptionPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 3, 2, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserEncryptionPassword.setStatus('current')
agentSerialGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 5))
agentSerialTimeout = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 160))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSerialTimeout.setStatus('current')
agentSerialBaudrate = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 5, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("baud-1200", 1), ("baud-2400", 2), ("baud-4800", 3), ("baud-9600", 4), ("baud-19200", 5), ("baud-38400", 6), ("baud-57600", 7), ("baud-115200", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSerialBaudrate.setStatus('current')
agentSerialCharacterSize = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 5, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSerialCharacterSize.setStatus('current')
agentSerialHWFlowControlMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 5, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSerialHWFlowControlMode.setStatus('current')
agentSerialStopBits = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 5, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSerialStopBits.setStatus('current')
agentSerialParityType = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 1, 5, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("even", 1), ("odd", 2), ("none", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSerialParityType.setStatus('current')
agentLagConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2))
agentLagConfigCreate = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagConfigCreate.setStatus('current')
agentLagSummaryConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2), )
if mibBuilder.loadTexts: agentLagSummaryConfigTable.setStatus('current')
agentLagSummaryConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentLagSummaryLagIndex"))
if mibBuilder.loadTexts: agentLagSummaryConfigEntry.setStatus('current')
agentLagSummaryLagIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLagSummaryLagIndex.setStatus('current')
agentLagSummaryName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryName.setStatus('current')
agentLagSummaryFlushTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryFlushTimer.setStatus('obsolete')
agentLagSummaryLinkTrap = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryLinkTrap.setStatus('current')
agentLagSummaryAdminMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryAdminMode.setStatus('current')
agentLagSummaryStpMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("dot1d", 1), ("fast", 2), ("off", 3), ("dot1s", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryStpMode.setStatus('current')
agentLagSummaryAddPort = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryAddPort.setStatus('current')
agentLagSummaryDeletePort = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryDeletePort.setStatus('current')
agentLagSummaryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 9), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagSummaryStatus.setStatus('current')
agentLagSummaryType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("static", 1), ("dynamic", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLagSummaryType.setStatus('current')
agentLagDetailedConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 3), )
if mibBuilder.loadTexts: agentLagDetailedConfigTable.setStatus('current')
agentLagDetailedConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 3, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentLagDetailedLagIndex"), (0, "GSM7224-SWITCHING-MIB", "agentLagDetailedIfIndex"))
if mibBuilder.loadTexts: agentLagDetailedConfigEntry.setStatus('current')
agentLagDetailedLagIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLagDetailedLagIndex.setStatus('current')
agentLagDetailedIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLagDetailedIfIndex.setStatus('current')
agentLagDetailedPortSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 3, 1, 3), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLagDetailedPortSpeed.setStatus('current')
agentLagDetailedPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("active", 1), ("inactive", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentLagDetailedPortStatus.setStatus('current')
agentLagConfigStaticCapability = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLagConfigStaticCapability.setStatus('current')
agentNetworkConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3))
agentNetworkIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentNetworkIPAddress.setStatus('current')
agentNetworkSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentNetworkSubnetMask.setStatus('current')
agentNetworkDefaultGateway = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentNetworkDefaultGateway.setStatus('current')
agentNetworkBurnedInMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 4), PhysAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentNetworkBurnedInMacAddress.setStatus('current')
agentNetworkConfigProtocol = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("bootp", 2), ("dhcp", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentNetworkConfigProtocol.setStatus('current')
agentNetworkWebMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentNetworkWebMode.setStatus('current')
agentNetworkJavaMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentNetworkJavaMode.setStatus('current')
agentNetworkMgmtVlan = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 3, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentNetworkMgmtVlan.setStatus('current')
agentServicePortConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 4))
agentServicePortIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 4, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentServicePortIPAddress.setStatus('current')
agentServicePortSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 4, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentServicePortSubnetMask.setStatus('current')
agentServicePortDefaultGateway = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 4, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentServicePortDefaultGateway.setStatus('current')
agentServicePortBurnedInMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 4, 4), PhysAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentServicePortBurnedInMacAddress.setStatus('current')
agentServicePortConfigProtocol = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 4, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("bootp", 2), ("dhcp", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentServicePortConfigProtocol.setStatus('current')
agentSnmpConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6))
agentSnmpCommunityCreate = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpCommunityCreate.setStatus('current')
agentSnmpCommunityConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2), )
if mibBuilder.loadTexts: agentSnmpCommunityConfigTable.setStatus('current')
agentSnmpCommunityConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentSnmpCommunityIndex"))
if mibBuilder.loadTexts: agentSnmpCommunityConfigEntry.setStatus('current')
agentSnmpCommunityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSnmpCommunityIndex.setStatus('current')
agentSnmpCommunityName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpCommunityName.setStatus('current')
agentSnmpCommunityIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpCommunityIPAddress.setStatus('current')
agentSnmpCommunityIPMask = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpCommunityIPMask.setStatus('current')
agentSnmpCommunityAccessMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("read-only", 1), ("read-write", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpCommunityAccessMode.setStatus('current')
agentSnmpCommunityStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("active", 1), ("notInService", 2), ("config", 3), ("destroy", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpCommunityStatus.setStatus('current')
agentSnmpTrapReceiverCreate = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpTrapReceiverCreate.setStatus('current')
agentSnmpTrapReceiverConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 4), )
if mibBuilder.loadTexts: agentSnmpTrapReceiverConfigTable.setStatus('current')
agentSnmpTrapReceiverConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 4, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentSnmpTrapReceiverIndex"))
if mibBuilder.loadTexts: agentSnmpTrapReceiverConfigEntry.setStatus('current')
agentSnmpTrapReceiverIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSnmpTrapReceiverIndex.setStatus('current')
agentSnmpTrapReceiverCommunityName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 4, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpTrapReceiverCommunityName.setStatus('current')
agentSnmpTrapReceiverIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 4, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpTrapReceiverIPAddress.setStatus('current')
agentSnmpTrapReceiverStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("active", 1), ("notInService", 2), ("config", 3), ("destroy", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpTrapReceiverStatus.setStatus('current')
agentSnmpTrapFlagsConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 5))
agentSnmpAuthenticationTrapFlag = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpAuthenticationTrapFlag.setStatus('current')
agentSnmpLinkUpDownTrapFlag = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 5, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpLinkUpDownTrapFlag.setStatus('current')
agentSnmpMultipleUsersTrapFlag = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 5, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpMultipleUsersTrapFlag.setStatus('current')
agentSnmpSpanningTreeTrapFlag = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 5, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpSpanningTreeTrapFlag.setStatus('current')
agentSnmpBroadcastStormTrapFlag = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 6, 5, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSnmpBroadcastStormTrapFlag.setStatus('current')
agentSpanningTreeConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 7))
agentSpanningTreeMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 7, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSpanningTreeMode.setStatus('current')
agentSwitchConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8))
agentSwitchAddressAgingTimeoutTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 4), )
if mibBuilder.loadTexts: agentSwitchAddressAgingTimeoutTable.setStatus('current')
agentSwitchAddressAgingTimeoutEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 4, 1), ).setIndexNames((0, "Q-BRIDGE-MIB", "dot1qFdbId"))
if mibBuilder.loadTexts: agentSwitchAddressAgingTimeoutEntry.setStatus('current')
agentSwitchAddressAgingTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 1000000)).clone(300)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchAddressAgingTimeout.setStatus('current')
agentSwitchBroadcastControlMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchBroadcastControlMode.setStatus('current')
agentSwitchDot3FlowControlMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchDot3FlowControlMode.setStatus('current')
agentSwitchIGMPSnoopingGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 6))
agentSwitchIGMPSnoopingAdminMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchIGMPSnoopingAdminMode.setStatus('current')
agentSwitchIGMPSnoopingGroupMembershipInterval = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 6, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3600)).clone(260)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchIGMPSnoopingGroupMembershipInterval.setStatus('current')
agentSwitchIGMPSnoopingMaxResponseTime = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 6, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3600)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchIGMPSnoopingMaxResponseTime.setStatus('current')
agentSwitchIGMPSnoopingMRPExpirationTime = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 6, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 3600))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchIGMPSnoopingMRPExpirationTime.setStatus('current')
agentSwitchIGMPSnoopingPortMask = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 6, 5), AgentPortMask().clone(hexValue="000000000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSwitchIGMPSnoopingPortMask.setStatus('current')
agentSwitchIGMPSnoopingMulticastControlFramesProcessed = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 6, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchIGMPSnoopingMulticastControlFramesProcessed.setStatus('current')
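# Multicast Forwarding Database (MFDB): per-VLAN multicast MAC entries, a summary table
# and gauges describing table usage.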
agentSwitchMFDBGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7))
agentSwitchMFDBTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1), )
if mibBuilder.loadTexts: agentSwitchMFDBTable.setStatus('current')
agentSwitchMFDBEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentSwitchMFDBVlanId"), (0, "GSM7224-SWITCHING-MIB", "agentSwitchMFDBMacAddress"), (0, "GSM7224-SWITCHING-MIB", "agentSwitchMFDBProtocolType"))
if mibBuilder.loadTexts: agentSwitchMFDBEntry.setStatus('current')
agentSwitchMFDBVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1, 1), VlanIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBVlanId.setStatus('current')
agentSwitchMFDBMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBMacAddress.setStatus('current')
agentSwitchMFDBProtocolType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("static", 1), ("gmrp", 2), ("igmp", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBProtocolType.setStatus('current')
agentSwitchMFDBType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("static", 1), ("dynamic", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBType.setStatus('current')
agentSwitchMFDBDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBDescription.setStatus('current')
agentSwitchMFDBForwardingPortMask = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1, 6), AgentPortMask()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBForwardingPortMask.setStatus('current')
agentSwitchMFDBFilteringPortMask = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 1, 1, 7), AgentPortMask()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBFilteringPortMask.setStatus('current')
agentSwitchMFDBSummaryTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 2), )
if mibBuilder.loadTexts: agentSwitchMFDBSummaryTable.setStatus('current')
agentSwitchMFDBSummaryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 2, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentSwitchMFDBSummaryVlanId"), (0, "GSM7224-SWITCHING-MIB", "agentSwitchMFDBSummaryMacAddress"))
if mibBuilder.loadTexts: agentSwitchMFDBSummaryEntry.setStatus('current')
agentSwitchMFDBSummaryVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 2, 1, 1), VlanIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBSummaryVlanId.setStatus('current')
agentSwitchMFDBSummaryMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 2, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBSummaryMacAddress.setStatus('current')
agentSwitchMFDBSummaryForwardingPortMask = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 2, 1, 3), AgentPortMask()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBSummaryForwardingPortMask.setStatus('current')
agentSwitchMFDBMaxTableEntries = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBMaxTableEntries.setStatus('current')
agentSwitchMFDBMostEntriesUsed = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBMostEntriesUsed.setStatus('current')
agentSwitchMFDBCurrentEntries = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 8, 7, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSwitchMFDBCurrentEntries.setStatus('current')
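# File transfer configuration (TFTP/XMODEM/YMODEM/ZMODEM); the upload group follows,
# the download group is defined further below.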
agentTransferConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9))
agentTransferUploadGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1))
agentTransferUploadMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("tftp", 1), ("xmodem", 2), ("ymodem", 3), ("zmodem", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferUploadMode.setStatus('current')
agentTransferUploadServerIP = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferUploadServerIP.setStatus('current')
agentTransferUploadPath = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferUploadPath.setStatus('current')
agentTransferUploadFilename = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferUploadFilename.setStatus('current')
agentTransferUploadDataType = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5))).clone(namedValues=NamedValues(("config", 2), ("errorlog", 3), ("messagelog", 4), ("traplog", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferUploadDataType.setStatus('current')
agentTransferUploadStart = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferUploadStart.setStatus('current')
agentTransferUploadStatus = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("notInitiated", 1), ("transferStarting", 2), ("errorStarting", 3), ("wrongFileType", 4), ("updatingConfig", 5), ("invalidConfigFile", 6), ("writingToFlash", 7), ("failureWritingToFlash", 8), ("checkingCRC", 9), ("failedCRC", 10), ("unknownDirection", 11), ("transferSuccessful", 12), ("transferFailed", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentTransferUploadStatus.setStatus('current')
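# Download direction of the transfer group: code image, configuration and SSH/SSL key
# or certificate files.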
agentTransferDownloadGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2))
agentTransferDownloadMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("tftp", 1), ("xmodem", 2), ("ymodem", 3), ("zmodem", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferDownloadMode.setStatus('current')
agentTransferDownloadServerIP = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferDownloadServerIP.setStatus('current')
agentTransferDownloadPath = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferDownloadPath.setStatus('current')
agentTransferDownloadFilename = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferDownloadFilename.setStatus('current')
agentTransferDownloadDataType = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("code", 2), ("config", 3), ("sshkey-rsa1", 4), ("sshkey-rsa2", 5), ("sshkey-dsa", 6), ("sslpem-root", 7), ("sslpem-server", 8), ("sslpem-dhweak", 9), ("sslpem-dhstrong", 10)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferDownloadDataType.setStatus('current')
agentTransferDownloadStart = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentTransferDownloadStart.setStatus('current')
agentTransferDownloadStatus = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 9, 2, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("notInitiated", 1), ("transferStarting", 2), ("errorStarting", 3), ("wrongFileType", 4), ("updatingConfig", 5), ("invalidConfigFile", 6), ("writingToFlash", 7), ("failureWritingToFlash", 8), ("checkingCRC", 9), ("failedCRC", 10), ("unknownDirection", 11), ("transferSuccessful", 12), ("transferFailed", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentTransferDownloadStatus.setStatus('current')
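# Port mirroring: mirrored (source) port, probe (destination) port and mirroring mode.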
agentPortMirroringGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 10))
agentMirroredPortIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 10, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentMirroredPortIfIndex.setStatus('current')
agentProbePortIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 10, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentProbePortIfIndex.setStatus('current')
agentPortMirroringMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 10, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("delete", 3))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortMirroringMode.setStatus('current')
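# Per-port 802.3ad link aggregation (LACP) mode configuration.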
agentDot3adAggPortTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 12), )
if mibBuilder.loadTexts: agentDot3adAggPortTable.setStatus('current')
agentDot3adAggPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 12, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentDot3adAggPort"))
if mibBuilder.loadTexts: agentDot3adAggPortEntry.setStatus('current')
agentDot3adAggPort = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 12, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentDot3adAggPort.setStatus('current')
agentDot3adAggPortLACPMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 12, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentDot3adAggPortLACPMode.setStatus('current')
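# Per-port configuration: STP mode/state, admin mode, physical mode/status (obsolete),
# link traps, statistics clearing, double-VLAN tagging and frame size limits.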
agentPortConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13), )
if mibBuilder.loadTexts: agentPortConfigTable.setStatus('current')
agentPortConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentPortDot1dBasePort"))
if mibBuilder.loadTexts: agentPortConfigEntry.setStatus('current')
agentPortDot1dBasePort = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentPortDot1dBasePort.setStatus('current')
agentPortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentPortIfIndex.setStatus('current')
agentPortIanaType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 3), IANAifType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentPortIanaType.setStatus('current')
agentPortSTPMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("dot1d", 1), ("fast", 2), ("off", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortSTPMode.setStatus('current')
agentPortSTPState = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("blocking", 1), ("listening", 2), ("learning", 3), ("forwarding", 4), ("disabled", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentPortSTPState.setStatus('current')
agentPortAdminMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortAdminMode.setStatus('current')
agentPortPhysicalMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("auto-negotiate", 1), ("half-10", 2), ("full-10", 3), ("half-100", 4), ("full-100", 5), ("half-100fx", 6), ("full-100fx", 7), ("full-1000sx", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortPhysicalMode.setStatus('obsolete')
agentPortPhysicalStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("auto-negotiate", 1), ("half-10", 2), ("full-10", 3), ("half-100", 4), ("full-100", 5), ("half-100fx", 6), ("full-100fx", 7), ("full-1000sx", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentPortPhysicalStatus.setStatus('obsolete')
agentPortLinkTrapMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortLinkTrapMode.setStatus('current')
agentPortClearStats = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortClearStats.setStatus('current')
agentPortDefaultType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 11), ObjectIdentifier()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortDefaultType.setStatus('current')
agentPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 12), ObjectIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentPortType.setStatus('current')
agentPortAutoNegAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortAutoNegAdminStatus.setStatus('current')
agentPortDot3FlowControlMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortDot3FlowControlMode.setStatus('current')
agentPortDVlanTagMode = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortDVlanTagMode.setStatus('current')
agentPortDVlanTagEthertype = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortDVlanTagEthertype.setStatus('current')
agentPortDVlanTagCustomerId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 17), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortDVlanTagCustomerId.setStatus('current')
agentPortMaxFrameSizeLimit = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentPortMaxFrameSizeLimit.setStatus('current')
agentPortMaxFrameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 13, 1, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentPortMaxFrameSize.setStatus('current')
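# Protocol-based VLAN groups (IP/ARP/IPX classification) and their port membership.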
agentProtocolConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14))
agentProtocolGroupCreate = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentProtocolGroupCreate.setStatus('current')
agentProtocolGroupTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2), )
if mibBuilder.loadTexts: agentProtocolGroupTable.setStatus('current')
agentProtocolGroupEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentProtocolGroupId"))
if mibBuilder.loadTexts: agentProtocolGroupEntry.setStatus('current')
agentProtocolGroupId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentProtocolGroupId.setStatus('current')
agentProtocolGroupName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentProtocolGroupName.setStatus('current')
agentProtocolGroupVlanId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentProtocolGroupVlanId.setStatus('current')
agentProtocolGroupProtocolIP = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentProtocolGroupProtocolIP.setStatus('current')
agentProtocolGroupProtocolARP = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentProtocolGroupProtocolARP.setStatus('current')
agentProtocolGroupProtocolIPX = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentProtocolGroupProtocolIPX.setStatus('current')
agentProtocolGroupStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 2, 1, 7), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentProtocolGroupStatus.setStatus('current')
agentProtocolGroupPortTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 3), )
if mibBuilder.loadTexts: agentProtocolGroupPortTable.setStatus('current')
agentProtocolGroupPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 3, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentProtocolGroupId"), (0, "GSM7224-SWITCHING-MIB", "agentProtocolGroupPortIfIndex"))
if mibBuilder.loadTexts: agentProtocolGroupPortEntry.setStatus('current')
agentProtocolGroupPortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentProtocolGroupPortIfIndex.setStatus('current')
agentProtocolGroupPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 14, 3, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: agentProtocolGroupPortStatus.setStatus('current')
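# Spanning tree switch configuration (802.1D/802.1w/802.1s): MST region identity,
# force version, admin mode and per-port BPDU statistics.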
agentStpSwitchConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15))
agentStpConfigDigestKey = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpConfigDigestKey.setStatus('current')
agentStpConfigFormatSelector = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpConfigFormatSelector.setStatus('current')
agentStpConfigName = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpConfigName.setStatus('current')
agentStpConfigRevision = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpConfigRevision.setStatus('current')
agentStpForceVersion = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("dot1d", 1), ("dot1w", 2), ("dot1s", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpForceVersion.setStatus('current')
agentStpAdminMode = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpAdminMode.setStatus('current')
agentStpPortTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7), )
if mibBuilder.loadTexts: agentStpPortTable.setStatus('current')
agentStpPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: agentStpPortEntry.setStatus('current')
agentStpPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpPortState.setStatus('current')
agentStpPortStatsMstpBpduRx = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpPortStatsMstpBpduRx.setStatus('current')
agentStpPortStatsMstpBpduTx = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpPortStatsMstpBpduTx.setStatus('current')
agentStpPortStatsRstpBpduRx = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpPortStatsRstpBpduRx.setStatus('current')
agentStpPortStatsRstpBpduTx = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpPortStatsRstpBpduTx.setStatus('current')
agentStpPortStatsStpBpduRx = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpPortStatsStpBpduRx.setStatus('current')
agentStpPortStatsStpBpduTx = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpPortStatsStpBpduTx.setStatus('current')
agentStpPortUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 8), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpPortUpTime.setStatus('current')
agentStpPortMigrationCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 7, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("false", 0), ("true", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpPortMigrationCheck.setStatus('current')
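# Common Spanning Tree (CST) bridge parameters and per-port CST state.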
agentStpCstConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8))
agentStpCstHelloTime = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstHelloTime.setStatus('current')
agentStpCstMaxAge = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstMaxAge.setStatus('current')
agentStpCstRegionalRootId = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstRegionalRootId.setStatus('current')
agentStpCstRegionalRootPathCost = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstRegionalRootPathCost.setStatus('current')
agentStpCstRootFwdDelay = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstRootFwdDelay.setStatus('current')
agentStpCstBridgeFwdDelay = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 6), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(4, 30)).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpCstBridgeFwdDelay.setStatus('current')
agentStpCstBridgeHelloTime = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpCstBridgeHelloTime.setStatus('current')
agentStpCstBridgeHoldTime = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstBridgeHoldTime.setStatus('current')
agentStpCstBridgeMaxAge = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 8, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(6, 40)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpCstBridgeMaxAge.setStatus('current')
agentStpCstPortTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9), )
if mibBuilder.loadTexts: agentStpCstPortTable.setStatus('current')
agentStpCstPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: agentStpCstPortEntry.setStatus('current')
agentStpCstPortOperEdge = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstPortOperEdge.setStatus('current')
agentStpCstPortOperPointToPoint = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstPortOperPointToPoint.setStatus('current')
agentStpCstPortTopologyChangeAck = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstPortTopologyChangeAck.setStatus('current')
agentStpCstPortEdge = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpCstPortEdge.setStatus('current')
agentStpCstPortForwardingState = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("discarding", 1), ("learning", 2), ("forwarding", 3), ("disabled", 4), ("manualFwd", 5), ("notParticipate", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstPortForwardingState.setStatus('current')
agentStpCstPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstPortId.setStatus('current')
agentStpCstPortPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 200000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpCstPortPathCost.setStatus('current')
agentStpCstPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 8), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 240)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpCstPortPriority.setStatus('current')
agentStpCstDesignatedBridgeId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstDesignatedBridgeId.setStatus('current')
agentStpCstDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 10), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstDesignatedCost.setStatus('current')
agentStpCstDesignatedPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 9, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpCstDesignatedPortId.setStatus('current')
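# Multiple Spanning Tree (MST) instance table, per-instance port table and
# VLAN-to-instance mapping table.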
agentStpMstTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10), )
if mibBuilder.loadTexts: agentStpMstTable.setStatus('current')
agentStpMstEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentStpMstId"))
if mibBuilder.loadTexts: agentStpMstEntry.setStatus('current')
agentStpMstId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstId.setStatus('current')
agentStpMstBridgePriority = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 61440))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpMstBridgePriority.setStatus('current')
agentStpMstBridgeIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstBridgeIdentifier.setStatus('current')
agentStpMstDesignatedRootId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstDesignatedRootId.setStatus('current')
agentStpMstRootPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstRootPathCost.setStatus('current')
agentStpMstRootPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstRootPortId.setStatus('current')
agentStpMstTimeSinceTopologyChange = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstTimeSinceTopologyChange.setStatus('current')
agentStpMstTopologyChangeCount = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstTopologyChangeCount.setStatus('current')
agentStpMstTopologyChangeParm = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstTopologyChangeParm.setStatus('current')
agentStpMstRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 10, 1, 10), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: agentStpMstRowStatus.setStatus('current')
agentStpMstPortTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11), )
if mibBuilder.loadTexts: agentStpMstPortTable.setStatus('current')
agentStpMstPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentStpMstId"), (0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: agentStpMstPortEntry.setStatus('current')
agentStpMstPortForwardingState = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("discarding", 1), ("learning", 2), ("forwarding", 3), ("disabled", 4), ("manualFwd", 5), ("notParticipate", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstPortForwardingState.setStatus('current')
agentStpMstPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstPortId.setStatus('current')
agentStpMstPortPathCost = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 200000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpMstPortPathCost.setStatus('current')
agentStpMstPortPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 240)).clone(128)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentStpMstPortPriority.setStatus('current')
agentStpMstDesignatedBridgeId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstDesignatedBridgeId.setStatus('current')
agentStpMstDesignatedCost = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstDesignatedCost.setStatus('current')
agentStpMstDesignatedPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 11, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStpMstDesignatedPortId.setStatus('current')
agentStpMstVlanTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 12), )
if mibBuilder.loadTexts: agentStpMstVlanTable.setStatus('current')
agentStpMstVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 12, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentStpMstId"), (0, "Q-BRIDGE-MIB", "dot1qVlanIndex"))
if mibBuilder.loadTexts: agentStpMstVlanEntry.setStatus('current')
agentStpMstVlanRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 15, 12, 1, 1), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: agentStpMstVlanRowStatus.setStatus('current')
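# Login authentication lists (local/RADIUS/reject methods), per-user list assignment
# and per-user port security masks.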
agentAuthenticationGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16))
agentAuthenticationListCreate = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentAuthenticationListCreate.setStatus('current')
agentAuthenticationListTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2), )
if mibBuilder.loadTexts: agentAuthenticationListTable.setStatus('current')
agentAuthenticationListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentAuthenticationListIndex"))
if mibBuilder.loadTexts: agentAuthenticationListEntry.setStatus('current')
agentAuthenticationListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2, 1, 1), Unsigned32())
if mibBuilder.loadTexts: agentAuthenticationListIndex.setStatus('current')
agentAuthenticationListName = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentAuthenticationListName.setStatus('current')
agentAuthenticationListMethod1 = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("local", 1), ("radius", 2), ("reject", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentAuthenticationListMethod1.setStatus('current')
agentAuthenticationListMethod2 = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("undefined", 1), ("local", 2), ("radius", 3), ("reject", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentAuthenticationListMethod2.setStatus('current')
agentAuthenticationListMethod3 = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("undefined", 1), ("local", 2), ("radius", 3), ("reject", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentAuthenticationListMethod3.setStatus('current')
agentAuthenticationListStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 2, 1, 6), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentAuthenticationListStatus.setStatus('current')
agentUserConfigDefaultAuthenticationList = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserConfigDefaultAuthenticationList.setStatus('current')
agentUserAuthenticationConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 4), )
if mibBuilder.loadTexts: agentUserAuthenticationConfigTable.setStatus('current')
agentUserAuthenticationConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 4, 1), )
agentUserConfigEntry.registerAugmentions(("GSM7224-SWITCHING-MIB", "agentUserAuthenticationConfigEntry"))
agentUserAuthenticationConfigEntry.setIndexNames(*agentUserConfigEntry.getIndexNames())
if mibBuilder.loadTexts: agentUserAuthenticationConfigEntry.setStatus('current')
agentUserAuthenticationList = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 4, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserAuthenticationList.setStatus('current')
agentUserPortConfigTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 5), )
if mibBuilder.loadTexts: agentUserPortConfigTable.setStatus('current')
agentUserPortConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 5, 1), )
agentUserConfigEntry.registerAugmentions(("GSM7224-SWITCHING-MIB", "agentUserPortConfigEntry"))
agentUserPortConfigEntry.setIndexNames(*agentUserConfigEntry.getIndexNames())
if mibBuilder.loadTexts: agentUserPortConfigEntry.setStatus('current')
agentUserPortSecurity = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 16, 5, 1, 1), AgentPortMask()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentUserPortSecurity.setStatus('current')
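# Class of service: 802.1p priority (0..7) to traffic class (0..7) mapping.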
agentClassOfServiceGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 17))
agentClassOfServiceTable = MibTable((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 17, 1), )
if mibBuilder.loadTexts: agentClassOfServiceTable.setStatus('current')
agentClassOfServiceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 17, 1, 1), ).setIndexNames((0, "GSM7224-SWITCHING-MIB", "agentClassOfServicePriority"))
if mibBuilder.loadTexts: agentClassOfServiceEntry.setStatus('current')
agentClassOfServicePriority = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 17, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)))
if mibBuilder.loadTexts: agentClassOfServicePriority.setStatus('current')
agentClassOfServiceClass = MibTableColumn((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 2, 17, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClassOfServiceClass.setStatus('current')
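# System actions: save/clear configuration, clear LAGs, sessions, passwords,
# statistics, trap log and VLANs, and reset the system.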
agentSystemGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3))
agentSaveConfig = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentSaveConfig.setStatus('current')
agentClearConfig = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearConfig.setStatus('current')
agentClearLags = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearLags.setStatus('current')
agentClearLoginSessions = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearLoginSessions.setStatus('current')
agentClearPasswords = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearPasswords.setStatus('current')
agentClearPortStats = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearPortStats.setStatus('current')
agentClearSwitchStats = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearSwitchStats.setStatus('current')
agentClearTrapLog = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearTrapLog.setStatus('current')
agentClearVlan = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentClearVlan.setStatus('current')
agentResetSystem = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentResetSystem.setStatus('current')
agentSaveConfigStatus = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 3, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notInitiated", 1), ("savingInProcess", 2), ("savingComplete", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentSaveConfigStatus.setStatus('current')
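# Virtual cable tester: cable diagnostics (status, length estimate, fault location)
# for the port selected by agentCableTesterIfIndex.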
agentCableTesterGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 4))
agentCableTesterStatus = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("active", 1), ("success", 2), ("failure", 3), ("uninitialized", 4))).clone('uninitialized')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentCableTesterStatus.setStatus('current')
agentCableTesterIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 4, 2), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentCableTesterIfIndex.setStatus('current')
agentCableTesterCableStatus = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("normal", 1), ("open", 2), ("short", 3), ("unknown", 4))).clone('unknown')).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentCableTesterCableStatus.setStatus('current')
agentCableTesterMinimumCableLength = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 4, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentCableTesterMinimumCableLength.setStatus('current')
agentCableTesterMaximumCableLength = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 4, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentCableTesterMaximumCableLength.setStatus('current')
agentCableTesterCableFailureLocation = MibScalar((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 4, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentCableTesterCableFailureLocation.setStatus('current')
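# Notification (trap) definitions for the GSM7224 switching MIB.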
gsm7224SwitchingTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50))
multipleUsersTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 1))
if mibBuilder.loadTexts: multipleUsersTrap.setStatus('current')
broadcastStormStartTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 2))
if mibBuilder.loadTexts: broadcastStormStartTrap.setStatus('current')
broadcastStormEndTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 3))
if mibBuilder.loadTexts: broadcastStormEndTrap.setStatus('current')
linkFailureTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 4))
if mibBuilder.loadTexts: linkFailureTrap.setStatus('current')
vlanRequestFailureTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 5)).setObjects(("Q-BRIDGE-MIB", "dot1qVlanIndex"))
if mibBuilder.loadTexts: vlanRequestFailureTrap.setStatus('current')
vlanDeleteLastTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 6)).setObjects(("Q-BRIDGE-MIB", "dot1qVlanIndex"))
if mibBuilder.loadTexts: vlanDeleteLastTrap.setStatus('current')
vlanDefaultCfgFailureTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 7)).setObjects(("Q-BRIDGE-MIB", "dot1qVlanIndex"))
if mibBuilder.loadTexts: vlanDefaultCfgFailureTrap.setStatus('current')
vlanRestoreFailureTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 8)).setObjects(("Q-BRIDGE-MIB", "dot1qVlanIndex"))
if mibBuilder.loadTexts: vlanRestoreFailureTrap.setStatus('current')
fanFailureTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 9))
if mibBuilder.loadTexts: fanFailureTrap.setStatus('current')
stpInstanceNewRootTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 10)).setObjects(("GSM7224-SWITCHING-MIB", "agentStpMstId"))
if mibBuilder.loadTexts: stpInstanceNewRootTrap.setStatus('current')
stpInstanceTopologyChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 11)).setObjects(("GSM7224-SWITCHING-MIB", "agentStpMstId"))
if mibBuilder.loadTexts: stpInstanceTopologyChangeTrap.setStatus('current')
powerSupplyStatusChangeTrap = NotificationType((1, 3, 6, 1, 4, 1, 4526, 1, 8, 1, 50, 12))
if mibBuilder.loadTexts: powerSupplyStatusChangeTrap.setStatus('current')
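# Export all defined symbols so other MIB modules and the pysnmp MIB builder can
# resolve them by name.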
mibBuilder.exportSymbols("GSM7224-SWITCHING-MIB", agentStpMstVlanTable=agentStpMstVlanTable, agentSerialStopBits=agentSerialStopBits, agentPortDVlanTagEthertype=agentPortDVlanTagEthertype, vlanDefaultCfgFailureTrap=vlanDefaultCfgFailureTrap, agentStpCstRegionalRootPathCost=agentStpCstRegionalRootPathCost, agentProtocolGroupPortEntry=agentProtocolGroupPortEntry, agentSwitchMFDBSummaryForwardingPortMask=agentSwitchMFDBSummaryForwardingPortMask, broadcastStormStartTrap=broadcastStormStartTrap, agentProtocolGroupProtocolARP=agentProtocolGroupProtocolARP, agentLagConfigGroup=agentLagConfigGroup, agentPortMirroringMode=agentPortMirroringMode, agentClearPasswords=agentClearPasswords, agentTrapLogEntry=agentTrapLogEntry, agentServicePortConfigProtocol=agentServicePortConfigProtocol, agentStpCstPortTopologyChangeAck=agentStpCstPortTopologyChangeAck, multipleUsersTrap=multipleUsersTrap, agentSaveConfigStatus=agentSaveConfigStatus, agentSwitchMFDBProtocolType=agentSwitchMFDBProtocolType, agentSnmpLinkUpDownTrapFlag=agentSnmpLinkUpDownTrapFlag, agentLagDetailedLagIndex=agentLagDetailedLagIndex, agentStpMstPortForwardingState=agentStpMstPortForwardingState, agentInventoryGroup=agentInventoryGroup, agentProtocolGroupProtocolIP=agentProtocolGroupProtocolIP, agentCLIConfigGroup=agentCLIConfigGroup, agentMirroredPortIfIndex=agentMirroredPortIfIndex, agentLoginSessionConnectionType=agentLoginSessionConnectionType, agentPortPhysicalStatus=agentPortPhysicalStatus, agentTrapLogTotalSinceLastViewed=agentTrapLogTotalSinceLastViewed, agentProbePortIfIndex=agentProbePortIfIndex, fanFailureTrap=fanFailureTrap, agentStpCstMaxAge=agentStpCstMaxAge, agentUserConfigEntry=agentUserConfigEntry, agentSwitchMFDBMacAddress=agentSwitchMFDBMacAddress, agentSwitchMFDBForwardingPortMask=agentSwitchMFDBForwardingPortMask, agentStpCstDesignatedCost=agentStpCstDesignatedCost, agentProtocolConfigGroup=agentProtocolConfigGroup, agentClearConfig=agentClearConfig, agentPortConfigTable=agentPortConfigTable, agentLagConfigCreate=agentLagConfigCreate, agentCableTesterMaximumCableLength=agentCableTesterMaximumCableLength, agentStpCstBridgeHoldTime=agentStpCstBridgeHoldTime, agentAuthenticationListStatus=agentAuthenticationListStatus, agentPortMirroringGroup=agentPortMirroringGroup, agentTransferDownloadPath=agentTransferDownloadPath, agentSwitchIGMPSnoopingPortMask=agentSwitchIGMPSnoopingPortMask, agentDot3adAggPortLACPMode=agentDot3adAggPortLACPMode, agentPortPhysicalMode=agentPortPhysicalMode, agentLagDetailedPortStatus=agentLagDetailedPortStatus, agentSaveConfig=agentSaveConfig, agentLagSummaryDeletePort=agentLagSummaryDeletePort, stpInstanceNewRootTrap=stpInstanceNewRootTrap, agentAuthenticationListMethod1=agentAuthenticationListMethod1, agentUserAuthenticationType=agentUserAuthenticationType, agentUserIndex=agentUserIndex, agentSystemGroup=agentSystemGroup, agentSnmpConfigGroup=agentSnmpConfigGroup, agentLoginSessionIndex=agentLoginSessionIndex, agentStpMstVlanRowStatus=agentStpMstVlanRowStatus, agentPortSTPMode=agentPortSTPMode, agentSwitchDot3FlowControlMode=agentSwitchDot3FlowControlMode, agentAuthenticationListCreate=agentAuthenticationListCreate, agentLagSummaryStatus=agentLagSummaryStatus, agentStpCstBridgeFwdDelay=agentStpCstBridgeFwdDelay, agentLoginSessionSessionTime=agentLoginSessionSessionTime, agentUserAuthenticationList=agentUserAuthenticationList, agentProtocolGroupId=agentProtocolGroupId, agentNetworkJavaMode=agentNetworkJavaMode, agentClassOfServiceClass=agentClassOfServiceClass, 
agentStpCstPortForwardingState=agentStpCstPortForwardingState, agentStpCstPortEntry=agentStpCstPortEntry, agentInventorySoftwareVersion=agentInventorySoftwareVersion, agentServicePortIPAddress=agentServicePortIPAddress, agentTrapLogIndex=agentTrapLogIndex, agentTransferUploadStatus=agentTransferUploadStatus, agentStpPortTable=agentStpPortTable, agentSupportedMibIndex=agentSupportedMibIndex, agentTransferDownloadStatus=agentTransferDownloadStatus, agentSwitchIGMPSnoopingGroupMembershipInterval=agentSwitchIGMPSnoopingGroupMembershipInterval, agentPortLinkTrapMode=agentPortLinkTrapMode, powerSupplyStatusChangeTrap=powerSupplyStatusChangeTrap, agentLoginSessionTable=agentLoginSessionTable, stpInstanceTopologyChangeTrap=stpInstanceTopologyChangeTrap, agentTelnetLoginTimeout=agentTelnetLoginTimeout, agentLagSummaryConfigTable=agentLagSummaryConfigTable, agentStpPortStatsMstpBpduRx=agentStpPortStatsMstpBpduRx, agentClassOfServiceTable=agentClassOfServiceTable, agentSpanningTreeConfigGroup=agentSpanningTreeConfigGroup, agentUserPortConfigEntry=agentUserPortConfigEntry, agentTransferUploadDataType=agentTransferUploadDataType, agentStpCstPortOperEdge=agentStpCstPortOperEdge, vlanRequestFailureTrap=vlanRequestFailureTrap, agentProtocolGroupEntry=agentProtocolGroupEntry, agentUserAuthenticationConfigTable=agentUserAuthenticationConfigTable, agentStpPortStatsStpBpduTx=agentStpPortStatsStpBpduTx, agentLoginSessionEntry=agentLoginSessionEntry, agentNetworkWebMode=agentNetworkWebMode, agentPortMaxFrameSizeLimit=agentPortMaxFrameSizeLimit, agentAuthenticationGroup=agentAuthenticationGroup, agentCableTesterStatus=agentCableTesterStatus, agentSnmpCommunityIndex=agentSnmpCommunityIndex, agentNetworkIPAddress=agentNetworkIPAddress, agentStpMstDesignatedRootId=agentStpMstDesignatedRootId, agentStpCstPortPriority=agentStpCstPortPriority, agentInventoryAdditionalPackages=agentInventoryAdditionalPackages, agentSnmpTrapReceiverConfigTable=agentSnmpTrapReceiverConfigTable, agentClassOfServicePriority=agentClassOfServicePriority, agentTrapLogTotal=agentTrapLogTotal, agentStpCstPortTable=agentStpCstPortTable, agentStpMstPortPriority=agentStpMstPortPriority, agentPortDot3FlowControlMode=agentPortDot3FlowControlMode, agentUserPortSecurity=agentUserPortSecurity, agentSnmpCommunityName=agentSnmpCommunityName, agentTransferUploadServerIP=agentTransferUploadServerIP, agentSwitchMFDBSummaryMacAddress=agentSwitchMFDBSummaryMacAddress, agentPortAutoNegAdminStatus=agentPortAutoNegAdminStatus, agentStpMstEntry=agentStpMstEntry, agentPortDVlanTagCustomerId=agentPortDVlanTagCustomerId, agentSwitchMFDBMostEntriesUsed=agentSwitchMFDBMostEntriesUsed, agentSupportedMibTable=agentSupportedMibTable, agentTrapLogTable=agentTrapLogTable, agentInventorySysDescription=agentInventorySysDescription, agentProtocolGroupPortStatus=agentProtocolGroupPortStatus, agentStpCstRootFwdDelay=agentStpCstRootFwdDelay, agentClearLoginSessions=agentClearLoginSessions, agentSnmpCommunityCreate=agentSnmpCommunityCreate, agentSwitchMFDBDescription=agentSwitchMFDBDescription, agentLagSummaryAdminMode=agentLagSummaryAdminMode, agentLagSummaryType=agentLagSummaryType, agentTransferDownloadGroup=agentTransferDownloadGroup, agentNetworkConfigProtocol=agentNetworkConfigProtocol, agentStpMstPortEntry=agentStpMstPortEntry, agentNetworkDefaultGateway=agentNetworkDefaultGateway, agentSwitchMFDBSummaryTable=agentSwitchMFDBSummaryTable, agentPortClearStats=agentPortClearStats, agentLagDetailedConfigTable=agentLagDetailedConfigTable, 
agentStpSwitchConfigGroup=agentStpSwitchConfigGroup, agentStpMstTopologyChangeCount=agentStpMstTopologyChangeCount, agentLagSummaryLinkTrap=agentLagSummaryLinkTrap, agentProtocolGroupPortTable=agentProtocolGroupPortTable, agentStpCstPortPathCost=agentStpCstPortPathCost, agentSwitchBroadcastControlMode=agentSwitchBroadcastControlMode, agentSwitchAddressAgingTimeout=agentSwitchAddressAgingTimeout, agentPortType=agentPortType, agentSwitchMFDBVlanId=agentSwitchMFDBVlanId, agentSupportedMibDescription=agentSupportedMibDescription, agentStpMstRootPathCost=agentStpMstRootPathCost, agentInfoGroup=agentInfoGroup, agentProtocolGroupTable=agentProtocolGroupTable, agentServicePortConfigGroup=agentServicePortConfigGroup, agentSwitchIGMPSnoopingAdminMode=agentSwitchIGMPSnoopingAdminMode, agentClearSwitchStats=agentClearSwitchStats, agentSerialCharacterSize=agentSerialCharacterSize, agentPortMaxFrameSize=agentPortMaxFrameSize, agentConfigGroup=agentConfigGroup, agentStpCstPortId=agentStpCstPortId, agentStpMstPortTable=agentStpMstPortTable, agentStpCstBridgeMaxAge=agentStpCstBridgeMaxAge, agentStpMstRowStatus=agentStpMstRowStatus, vlanDeleteLastTrap=vlanDeleteLastTrap, agentNetworkConfigGroup=agentNetworkConfigGroup, agentProtocolGroupPortIfIndex=agentProtocolGroupPortIfIndex, agentPortConfigEntry=agentPortConfigEntry, agentAuthenticationListMethod3=agentAuthenticationListMethod3, agentSnmpCommunityIPMask=agentSnmpCommunityIPMask, agentSwitchMFDBMaxTableEntries=agentSwitchMFDBMaxTableEntries, agentSerialParityType=agentSerialParityType, agentSnmpTrapFlagsConfigGroup=agentSnmpTrapFlagsConfigGroup, gsm7224Switching=gsm7224Switching, agentSwitchMFDBEntry=agentSwitchMFDBEntry, agentStpConfigDigestKey=agentStpConfigDigestKey, agentServicePortSubnetMask=agentServicePortSubnetMask, agentProtocolGroupProtocolIPX=agentProtocolGroupProtocolIPX, agentStpMstPortPathCost=agentStpMstPortPathCost, agentLagDetailedIfIndex=agentLagDetailedIfIndex, agentCableTesterGroup=agentCableTesterGroup, agentTransferUploadFilename=agentTransferUploadFilename, agentCableTesterCableStatus=agentCableTesterCableStatus, agentUserStatus=agentUserStatus, agentStpMstTable=agentStpMstTable, agentSnmpSpanningTreeTrapFlag=agentSnmpSpanningTreeTrapFlag, agentStpAdminMode=agentStpAdminMode, agentClearPortStats=agentClearPortStats, agentSupportedMibEntry=agentSupportedMibEntry, agentStpMstBridgeIdentifier=agentStpMstBridgeIdentifier, agentCableTesterIfIndex=agentCableTesterIfIndex, agentLagDetailedConfigEntry=agentLagDetailedConfigEntry, agentTransferUploadMode=agentTransferUploadMode, agentStpMstVlanEntry=agentStpMstVlanEntry, agentStpPortState=agentStpPortState, agentAuthenticationListEntry=agentAuthenticationListEntry, agentPortDefaultType=agentPortDefaultType, agentTransferDownloadDataType=agentTransferDownloadDataType, vlanRestoreFailureTrap=vlanRestoreFailureTrap, agentStpCstConfigGroup=agentStpCstConfigGroup, agentClearLags=agentClearLags, agentSwitchMFDBGroup=agentSwitchMFDBGroup, agentServicePortBurnedInMacAddress=agentServicePortBurnedInMacAddress, agentDot3adAggPort=agentDot3adAggPort, agentStpPortStatsMstpBpduTx=agentStpPortStatsMstpBpduTx, agentSupportedMibName=agentSupportedMibName, agentSnmpMultipleUsersTrapFlag=agentSnmpMultipleUsersTrapFlag, agentTransferUploadPath=agentTransferUploadPath, agentStpMstDesignatedBridgeId=agentStpMstDesignatedBridgeId, agentClassOfServiceEntry=agentClassOfServiceEntry, agentLagSummaryAddPort=agentLagSummaryAddPort, agentTrapLogTrap=agentTrapLogTrap, 
agentSwitchAddressAgingTimeoutTable=agentSwitchAddressAgingTimeoutTable, agentStpMstTimeSinceTopologyChange=agentStpMstTimeSinceTopologyChange, agentTransferDownloadServerIP=agentTransferDownloadServerIP, agentStpMstPortId=agentStpMstPortId, agentSnmpTrapReceiverIPAddress=agentSnmpTrapReceiverIPAddress, agentCableTesterMinimumCableLength=agentCableTesterMinimumCableLength, agentStpCstRegionalRootId=agentStpCstRegionalRootId, agentStpCstHelloTime=agentStpCstHelloTime, agentStpPortUpTime=agentStpPortUpTime, agentStpConfigFormatSelector=agentStpConfigFormatSelector, agentTrapLogGroup=agentTrapLogGroup, agentPortIanaType=agentPortIanaType, agentStpMstBridgePriority=agentStpMstBridgePriority, agentInventoryBurnedInMacAddress=agentInventoryBurnedInMacAddress, agentTrapLogSystemTime=agentTrapLogSystemTime, agentLoginSessionStatus=agentLoginSessionStatus, agentUserConfigDefaultAuthenticationList=agentUserConfigDefaultAuthenticationList, agentLagSummaryName=agentLagSummaryName, agentSwitchIGMPSnoopingMulticastControlFramesProcessed=agentSwitchIGMPSnoopingMulticastControlFramesProcessed, agentStpMstRootPortId=agentStpMstRootPortId, agentSerialGroup=agentSerialGroup, agentServicePortDefaultGateway=agentServicePortDefaultGateway, agentSwitchMFDBType=agentSwitchMFDBType, agentProtocolGroupCreate=agentProtocolGroupCreate, agentAuthenticationListName=agentAuthenticationListName, agentSwitchMFDBTable=agentSwitchMFDBTable, agentSerialTimeout=agentSerialTimeout, agentUserAuthenticationConfigEntry=agentUserAuthenticationConfigEntry, agentStpCstBridgeHelloTime=agentStpCstBridgeHelloTime, agentSnmpTrapReceiverCreate=agentSnmpTrapReceiverCreate, agentSnmpTrapReceiverConfigEntry=agentSnmpTrapReceiverConfigEntry, agentLagDetailedPortSpeed=agentLagDetailedPortSpeed, agentSwitchAddressAgingTimeoutEntry=agentSwitchAddressAgingTimeoutEntry, agentStpCstDesignatedBridgeId=agentStpCstDesignatedBridgeId, agentSnmpCommunityAccessMode=agentSnmpCommunityAccessMode, agentPortIfIndex=agentPortIfIndex, agentAuthenticationListMethod2=agentAuthenticationListMethod2, agentSwitchMFDBSummaryEntry=agentSwitchMFDBSummaryEntry, agentAuthenticationListTable=agentAuthenticationListTable, agentStpCstDesignatedPortId=agentStpCstDesignatedPortId, agentSnmpCommunityIPAddress=agentSnmpCommunityIPAddress, agentPortDot1dBasePort=agentPortDot1dBasePort, agentNetworkBurnedInMacAddress=agentNetworkBurnedInMacAddress, agentTelnetConfigGroup=agentTelnetConfigGroup, agentSwitchIGMPSnoopingMRPExpirationTime=agentSwitchIGMPSnoopingMRPExpirationTime, agentLoginSessionIPAddress=agentLoginSessionIPAddress, agentSwitchIGMPSnoopingMaxResponseTime=agentSwitchIGMPSnoopingMaxResponseTime, gsm7224SwitchingTraps=gsm7224SwitchingTraps)
mibBuilder.exportSymbols("GSM7224-SWITCHING-MIB", agentTransferConfigGroup=agentTransferConfigGroup, agentTransferDownloadStart=agentTransferDownloadStart, agentDot3adAggPortEntry=agentDot3adAggPortEntry, agentSnmpAuthenticationTrapFlag=agentSnmpAuthenticationTrapFlag, agentStpConfigRevision=agentStpConfigRevision, agentLoginSessionIdleTime=agentLoginSessionIdleTime, agentUserConfigTable=agentUserConfigTable, agentAuthenticationListIndex=agentAuthenticationListIndex, agentStpCstPortOperPointToPoint=agentStpCstPortOperPointToPoint, agentStpMstTopologyChangeParm=agentStpMstTopologyChangeParm, agentSwitchMFDBFilteringPortMask=agentSwitchMFDBFilteringPortMask, agentUserConfigGroup=agentUserConfigGroup, agentStpPortEntry=agentStpPortEntry, agentUserPassword=agentUserPassword, agentStpConfigName=agentStpConfigName, agentStpMstDesignatedPortId=agentStpMstDesignatedPortId, agentTelnetAllowNewMode=agentTelnetAllowNewMode, agentTransferUploadGroup=agentTransferUploadGroup, agentLoginSessionUserName=agentLoginSessionUserName, agentSnmpBroadcastStormTrapFlag=agentSnmpBroadcastStormTrapFlag, agentSnmpCommunityConfigTable=agentSnmpCommunityConfigTable, agentNetworkMgmtVlan=agentNetworkMgmtVlan, agentLagSummaryStpMode=agentLagSummaryStpMode, agentSpanningTreeMode=agentSpanningTreeMode, broadcastStormEndTrap=broadcastStormEndTrap, agentStpForceVersion=agentStpForceVersion, agentSnmpCommunityStatus=agentSnmpCommunityStatus, agentCableTesterCableFailureLocation=agentCableTesterCableFailureLocation, agentSerialHWFlowControlMode=agentSerialHWFlowControlMode, agentUserName=agentUserName, agentStpPortStatsStpBpduRx=agentStpPortStatsStpBpduRx, agentClearTrapLog=agentClearTrapLog, agentStpMstId=agentStpMstId, agentStpCstPortEdge=agentStpCstPortEdge, agentProtocolGroupStatus=agentProtocolGroupStatus, agentLagSummaryFlushTimer=agentLagSummaryFlushTimer, agentSnmpCommunityConfigEntry=agentSnmpCommunityConfigEntry, agentStpPortStatsRstpBpduTx=agentStpPortStatsRstpBpduTx, agentLagSummaryLagIndex=agentLagSummaryLagIndex, agentStpPortStatsRstpBpduRx=agentStpPortStatsRstpBpduRx, agentSwitchConfigGroup=agentSwitchConfigGroup, agentTransferUploadStart=agentTransferUploadStart, agentProtocolGroupName=agentProtocolGroupName, linkFailureTrap=linkFailureTrap, agentInventoryMachineType=agentInventoryMachineType, agentTransferDownloadFilename=agentTransferDownloadFilename, PYSNMP_MODULE_ID=gsm7224Switching, agentSnmpTrapReceiverStatus=agentSnmpTrapReceiverStatus, agentResetSystem=agentResetSystem, agentTelnetMaxSessions=agentTelnetMaxSessions, agentSnmpTrapReceiverCommunityName=agentSnmpTrapReceiverCommunityName, agentStpMstDesignatedCost=agentStpMstDesignatedCost, agentClearVlan=agentClearVlan, agentTransferDownloadMode=agentTransferDownloadMode, agentPortDVlanTagMode=agentPortDVlanTagMode, agentLagConfigStaticCapability=agentLagConfigStaticCapability, agentUserPortConfigTable=agentUserPortConfigTable, agentStpPortMigrationCheck=agentStpPortMigrationCheck, agentSwitchMFDBSummaryVlanId=agentSwitchMFDBSummaryVlanId, agentPortSTPState=agentPortSTPState, agentClassOfServiceGroup=agentClassOfServiceGroup, agentProtocolGroupVlanId=agentProtocolGroupVlanId, agentSwitchIGMPSnoopingGroup=agentSwitchIGMPSnoopingGroup, agentUserConfigCreate=agentUserConfigCreate, agentDot3adAggPortTable=agentDot3adAggPortTable, agentSnmpTrapReceiverIndex=agentSnmpTrapReceiverIndex, agentUserEncryptionType=agentUserEncryptionType, agentSerialBaudrate=agentSerialBaudrate, agentPortAdminMode=agentPortAdminMode, 
agentUserEncryptionPassword=agentUserEncryptionPassword, agentLagSummaryConfigEntry=agentLagSummaryConfigEntry, agentSwitchMFDBCurrentEntries=agentSwitchMFDBCurrentEntries, agentNetworkSubnetMask=agentNetworkSubnetMask, agentUserAccessMode=agentUserAccessMode)
| 148.101382
| 13,245
| 0.770842
|
889d8e55e6004763472005ec1306025a2102db36
| 1,167
|
py
|
Python
|
aliyun-python-sdk-csb/aliyunsdkcsb/request/v20171118/UpdateOrderListRequest.py
|
liusc27/aliyun-openapi-python-sdk
|
5e3db3535dd21de987dc5981e71151327d5a884f
|
[
"Apache-2.0"
] | 1
|
2019-12-23T12:36:43.000Z
|
2019-12-23T12:36:43.000Z
|
aliyun-python-sdk-csb/aliyunsdkcsb/request/v20171118/UpdateOrderListRequest.py
|
liusc27/aliyun-openapi-python-sdk
|
5e3db3535dd21de987dc5981e71151327d5a884f
|
[
"Apache-2.0"
] | null | null | null |
aliyun-python-sdk-csb/aliyunsdkcsb/request/v20171118/UpdateOrderListRequest.py
|
liusc27/aliyun-openapi-python-sdk
|
5e3db3535dd21de987dc5981e71151327d5a884f
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class UpdateOrderListRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CSB', '2017-11-18', 'UpdateOrderList')
self.set_protocol_type('https');
self.set_method('POST')
def get_Data(self):
return self.get_body_params().get('Data')
def set_Data(self,Data):
self.add_body_params('Data', Data)
| 36.46875
| 68
| 0.757498
|
06305fbb75d47542dc464bcf26eb581ead373fc8
| 13,050
|
py
|
Python
|
reviewboard/webapi/resources/review_group.py
|
znick/reviewboard
|
f32320b267efcdf2feff1661eabe57f99ef490a7
|
[
"MIT"
] | 1
|
2018-08-23T09:19:02.000Z
|
2018-08-23T09:19:02.000Z
|
reviewboard/webapi/resources/review_group.py
|
klpyang/reviewboard
|
d7dabf36e5b492f18048dd7084026bf99d6933c5
|
[
"MIT"
] | null | null | null |
reviewboard/webapi/resources/review_group.py
|
klpyang/reviewboard
|
d7dabf36e5b492f18048dd7084026bf99d6933c5
|
[
"MIT"
] | 1
|
2021-11-23T15:25:44.000Z
|
2021-11-23T15:25:44.000Z
|
from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.utils import six
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import (webapi_login_required,
webapi_response_errors,
webapi_request_fields)
from djblets.webapi.errors import (DOES_NOT_EXIST, INVALID_FORM_DATA,
NOT_LOGGED_IN, PERMISSION_DENIED)
from reviewboard.reviews.models import Group
from reviewboard.webapi.base import WebAPIResource
from reviewboard.webapi.decorators import webapi_check_local_site
from reviewboard.webapi.errors import (GROUP_ALREADY_EXISTS,
INVALID_USER)
from reviewboard.webapi.resources import resources
class ReviewGroupResource(WebAPIResource):
"""Provides information on review groups.
Review groups are groups of users that can be listed as an intended
reviewer on a review request.
"""
model = Group
fields = {
'id': {
'type': int,
'description': 'The numeric ID of the review group.',
},
'name': {
'type': six.text_type,
'description': 'The short name of the group, used in the '
'reviewer list and the Dashboard.',
},
'display_name': {
'type': six.text_type,
'description': 'The human-readable name of the group, sometimes '
'used as a short description.',
},
'invite_only': {
'type': bool,
'description': 'Whether or not the group is invite-only. An '
'invite-only group is only accessible by members '
'of the group.',
},
'mailing_list': {
'type': six.text_type,
'description': 'The e-mail address that all posts on a review '
'group are sent to.',
},
'url': {
'type': six.text_type,
'description': "The URL to the user's page on the site. "
"This is deprecated and will be removed in a "
"future version.",
'deprecated_in': '2.0',
},
'absolute_url': {
'type': six.text_type,
'description': "The absolute URL to the user's page on the site.",
'added_in': '2.0',
},
'visible': {
'type': bool,
'description': 'Whether or not the group is visible to users '
'who are not members. This does not prevent users '
'from accessing the group if they know it, though.',
},
'extra_data': {
'type': dict,
'description': 'Extra data as part of the review group. '
'This can be set by the API or extensions.',
},
}
item_child_resources = [
resources.review_group_user,
]
uri_object_key = 'group_name'
uri_object_key_regex = '[A-Za-z0-9_-]+'
model_object_key = 'name'
autogenerate_etags = True
mimetype_list_resource_name = 'review-groups'
mimetype_item_resource_name = 'review-group'
allowed_methods = ('GET', 'POST', 'PUT', 'DELETE')
def has_delete_permissions(self, request, group, *args, **kwargs):
return group.is_mutable_by(request.user)
def has_modify_permissions(self, request, group):
return group.is_mutable_by(request.user)
def get_queryset(self, request, is_list=False, local_site_name=None,
*args, **kwargs):
search_q = request.GET.get('q', None)
local_site = self._get_local_site(local_site_name)
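        # List requests only return groups the requesting user can access;
        # item requests look the group up directly within the Local Site.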
if is_list:
query = self.model.objects.accessible(request.user,
local_site=local_site)
else:
query = self.model.objects.filter(local_site=local_site)
if search_q:
q = Q(name__istartswith=search_q)
if request.GET.get('displayname', None):
q = q | Q(display_name__istartswith=search_q)
query = query.filter(q)
return query
def serialize_url_field(self, group, **kwargs):
return group.get_absolute_url()
def serialize_absolute_url_field(self, obj, request, **kwargs):
return request.build_absolute_uri(obj.get_absolute_url())
def has_access_permissions(self, request, group, *args, **kwargs):
return group.is_accessible_by(request.user)
@webapi_check_local_site
@augment_method_from(WebAPIResource)
def get(self, *args, **kwargs):
"""Retrieve information on a review group.
Some basic information on the review group is provided, including
the name, description, and mailing list (if any) that e-mails to
the group are sent to.
The group links to the list of users that are members of the group.
"""
pass
@webapi_check_local_site
@webapi_request_fields(
optional={
'q': {
'type': six.text_type,
'description': 'The string that the group name (or the '
'display name when using ``displayname``) '
'must start with in order to be included in '
'the list. This is case-insensitive.',
},
'displayname': {
'type': bool,
'description': 'Specifies whether ``q`` should also match '
'the beginning of the display name.'
},
},
allow_unknown=True
)
@augment_method_from(WebAPIResource)
def get_list(self, *args, **kwargs):
"""Retrieves the list of review groups on the site.
The list of review groups can be filtered down using the ``q`` and
``displayname`` parameters.
Setting ``q`` to a value will by default limit the results to
group names starting with that value. This is a case-insensitive
comparison.
If ``displayname`` is set to ``1``, the display names will also be
        checked along with the group name. ``displayname`` is ignored if ``q``
is not set.
For example, accessing ``/api/groups/?q=dev&displayname=1`` will list
any groups with a name or display name starting with ``dev``.
"""
pass
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(GROUP_ALREADY_EXISTS, INVALID_FORM_DATA,
INVALID_USER, NOT_LOGGED_IN, PERMISSION_DENIED)
@webapi_request_fields(
required={
'name': {
'type': six.text_type,
'description': 'The name of the group.',
},
'display_name': {
'type': six.text_type,
'description': 'The human-readable name of the group.',
},
},
optional={
'mailing_list': {
'type': six.text_type,
'description': 'The e-mail address that all posts on a review '
'group are sent to.',
},
'visible': {
'type': bool,
'description': 'Whether or not the group is visible to users '
'who are not members. The default is true.',
},
'invite_only': {
'type': bool,
'description': 'Whether or not the group is invite-only. '
'The default is false.',
},
},
allow_unknown=True
)
def create(self, request, name, display_name, mailing_list=None,
visible=True, invite_only=False, local_site_name=None,
extra_fields={}, *args, **kargs):
"""Creates a new review group.
This will create a brand new review group with the given name
and display name. The group will be public by default, unless
specified otherwise.
Extra data can be stored on the group for later lookup by passing
``extra_data.key_name=value``. The ``key_name`` and ``value`` can
be any valid strings. Passing a blank ``value`` will remove the key.
The ``extra_data.`` prefix is required.
"""
local_site = self._get_local_site(local_site_name)
if not self.model.objects.can_create(request.user, local_site):
return self._no_access_error(request.user)
group, is_new = self.model.objects.get_or_create(
name=name,
local_site=local_site,
defaults={
'display_name': display_name,
'mailing_list': mailing_list or '',
'visible': bool(visible),
'invite_only': bool(invite_only),
})
if not is_new:
return GROUP_ALREADY_EXISTS
if extra_fields:
self._import_extra_data(group.extra_data, extra_fields)
group.save(update_fields=['extra_data'])
return 201, {
self.item_result_key: group,
}
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, INVALID_FORM_DATA,
GROUP_ALREADY_EXISTS, NOT_LOGGED_IN,
PERMISSION_DENIED)
@webapi_request_fields(
optional={
'name': {
'type': six.text_type,
'description': 'The new name for the group.',
},
'display_name': {
'type': six.text_type,
'description': 'The human-readable name of the group.',
},
'mailing_list': {
'type': six.text_type,
'description': 'The e-mail address that all posts on a review '
'group are sent to.',
},
'visible': {
'type': bool,
'description': 'Whether or not the group is visible to users '
'who are not members.',
},
'invite_only': {
'type': bool,
'description': 'Whether or not the group is invite-only.'
},
},
allow_unknown=True
)
def update(self, request, name=None, extra_fields={}, *args, **kwargs):
"""Updates an existing review group.
All the fields of a review group can be modified, including the
name, so long as it doesn't conflict with another review group.
Extra data can be stored on the group for later lookup by passing
``extra_data.key_name=value``. The ``key_name`` and ``value`` can
be any valid strings. Passing a blank ``value`` will remove the key.
The ``extra_data.`` prefix is required.
"""
try:
group = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
if not self.has_modify_permissions(request, group):
return self._no_access_error(request.user)
if name is not None and name != group.name:
# If we're changing the group name, make sure that group doesn't
# exist.
local_site = self._get_local_site(kwargs.get('local_site_name'))
if self.model.objects.filter(name=name,
local_site=local_site).count():
return GROUP_ALREADY_EXISTS
group.name = name
for field in ("display_name", "mailing_list", "visible",
"invite_only"):
val = kwargs.get(field, None)
if val is not None:
setattr(group, field, val)
self._import_extra_data(group.extra_data, extra_fields)
group.save()
return 200, {
self.item_result_key: group,
}
@webapi_check_local_site
@webapi_login_required
@webapi_response_errors(DOES_NOT_EXIST, NOT_LOGGED_IN, PERMISSION_DENIED)
def delete(self, request, *args, **kwargs):
"""Deletes a review group.
This will disassociate the group from all review requests previously
        targeting the group, and permanently delete the group.
It is best to only delete empty, unused groups, and to instead
        change a group to not be visible if it's no longer needed.
"""
try:
group = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
if not self.has_delete_permissions(request, group):
return self._no_access_error(request.user)
group.delete()
return 204, {}
review_group_resource = ReviewGroupResource()
| 36.657303
| 79
| 0.564138
|
26d8fbadcd900e7868f0b4515e4539daefc2e08f
| 24,943
|
py
|
Python
|
Bot/src/funhouse/music.py
|
AryamanSrii/Mecha-Karen
|
4a5c7318f8c458495eee72a13be5db8a0113ed28
|
[
"Apache-2.0"
] | 181
|
2021-05-26T17:37:40.000Z
|
2022-02-26T08:36:07.000Z
|
Bot/src/funhouse/music.py
|
AryamanSrii/Mecha-Karen
|
4a5c7318f8c458495eee72a13be5db8a0113ed28
|
[
"Apache-2.0"
] | 24
|
2021-05-14T19:47:34.000Z
|
2021-09-06T17:16:17.000Z
|
Bot/src/funhouse/music.py
|
AryamanSrii/Mecha-Karen
|
4a5c7318f8c458495eee72a13be5db8a0113ed28
|
[
"Apache-2.0"
] | 16
|
2021-07-02T09:40:56.000Z
|
2022-01-21T10:07:08.000Z
|
# !/usr/bin/python
"""
Copyright ©️: 2020 Seniatical / _-*™#7519
License: Apache 2.0
A permissive license whose main conditions require preservation of copyright and license notices.
Contributors provide an express grant of patent rights.
Licensed works, modifications, and larger works may be distributed under different terms and without source code.
FULL LICENSE CAN BE FOUND AT:
https://www.apache.org/licenses/LICENSE-2.0.html
Any violation to the license, will result in moderate action
You are legally required to mention (original author, license, source and any changes made)
"""
import re
import typing
import discord
import lavalink
from discord.ext import commands
from lavalink import format_time
import random
import asyncio
from duration import to_seconds
from requests.utils import requote_uri
from io import BytesIO
import asyncio
from itertools import accumulate
from core._.filters import filters
url_rx = re.compile(r'https?://(?:www\.)?.+')
def convert(time: int):
mins = time // 60
time %= 60
return '%d:%d' % (mins, time)
class Music(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.players = {}
self.votes = {}
self.session = __import__('aiohttp').ClientSession()
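        # Candidate equalizer gains (0.0 to 1.0 in 0.01 steps, plus 0.0 down to -0.25)
        # that the random filter samples from.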
self.gains = ([(1 * (i / 100)) for i in range(101)] + [-(1 * (i / 100)) for i in range(26)])
self.filter_maps = filters
if not hasattr(bot, 'lavalink'):
bot.lavalink = lavalink.Client(740514706858442792)
env = bot.env
bot.lavalink.add_node(env('LAVALINK_SERVER_IP'), env('LAVALINK_SERVER_PORT'),
env('LAVALINK_SERVER_PASSWORD'), env('LAVALINK_REGION'),
env('LAVALINK_NODETYPE'))
bot.add_listener(bot.lavalink.voice_update_handler, 'on_socket_response')
if not bot.lavalink._event_hooks['Generic']:
lavalink.add_event_hook(self.track_hook)
def cog_unload(self) -> None:
self.bot.lavalink._event_hooks.clear()
async def cog_before_invoke(self, ctx):
guild_check = ctx.guild is not None
if guild_check and ctx.command.name not in ['lyrics', 'join']:
await self.ensure_voice(ctx)
return guild_check
async def cog_command_error(self, ctx, error) -> None:
if isinstance(error, lavalink.exceptions.NodeException):
            return await ctx.send('Whoops, looks like the node is full!')
if isinstance(error, commands.errors.CommandInvokeError):
return
@staticmethod
async def convert(milliseconds: int) -> int:
seconds = milliseconds * 1000
minutes = (seconds / 60)
return int(minutes)
async def random_filter(self, as_order=True) -> list:
sync = list(range(15))
if not as_order:
random.shuffle(sync)
groups = []
for i in sync:
gain = random.choice(self.gains)
groups.append((i, gain))
return groups
async def ensure_voice(self, ctx) -> None:
player = self.bot.lavalink.player_manager.create(ctx.guild.id, endpoint=str(ctx.guild.region))
should_connect = ctx.command.name in ('play', 'join')
if not ctx.author.voice or not ctx.author.voice.channel:
ctx.command.reset_cooldown(ctx)
await ctx.message.reply(
                content='You must be in a voice channel in order to use this command!',
mention_author=False)
raise commands.errors.CommandInvokeError
if not player.is_connected:
if not should_connect:
ctx.command.reset_cooldown(ctx)
await ctx.message.reply(
content='I am currently not connected to any VC.',
mention_author=False)
raise commands.errors.CommandInvokeError
permissions = ctx.author.voice.channel.permissions_for(ctx.me)
if not permissions.connect or not permissions.speak:
ctx.command.reset_cooldown(ctx)
await ctx.message.reply(
content='I am missing `CONNECT` or `SPEAK` permissions!',
mention_author=False)
raise commands.errors.CommandInvokeError
player.store('channel', ctx.channel.id)
player.store('ctx', ctx)
await self.connect_to(ctx.guild.id, str(ctx.author.voice.channel.id))
await asyncio.sleep(1) ## Kept joining way too fast.
await ctx.message.reply(
content='Connected to **%s** and bound to **%s**!' % (ctx.me.voice.channel, ctx.channel),
mention_author=False
)
else:
if int(player.channel_id) != ctx.author.voice.channel.id:
ctx.command.reset_cooldown(ctx)
await ctx.message.reply(
content='You need to be in the same vc as me!',
mention_author=False
)
raise commands.errors.CommandInvokeError
async def track_hook(self, event) -> any:
if isinstance(event, lavalink.events.QueueEndEvent):
await asyncio.sleep(30)
if event.player.is_playing:
return
guild_id = int(event.player.guild_id)
ctx = event.player.fetch('ctx')
if ctx:
try:
await ctx.send('Left **%s** because I am no longer playing anything.' % ctx.me.voice.channel)
except AttributeError:
                    await ctx.send('Left the channel because I am no longer playing anything.')
event.player.delete('ctx')
await self.connect_to(guild_id, None)
if isinstance(event, lavalink.events.TrackStartEvent):
ctx = event.player.fetch('ctx')
track = event.track
if ctx and not event.player.repeat:
await ctx.send('Now playing **%s** requested by **%s**' % (
track.title, ctx.guild.get_member(int(track.requester))))
if isinstance(event, lavalink.events.TrackStuckEvent):
ctx = event.player.fetch('ctx')
if ctx:
                await ctx.send('An error has occurred whilst playing your track!')
async def connect_to(self, guild_id: int, channel_id: typing.Union[str, None]) -> None:
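        # Send the voice-state update straight through the gateway websocket so
        # lavalink picks it up; passing channel_id=None disconnects the bot.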
ws = self.bot._connection._get_websocket(guild_id)
await ws.voice_state(str(guild_id), channel_id)
@staticmethod
def convert_to_min_and_seconds(milliseconds: int):
minutes = milliseconds // 60000
seconds = round(((milliseconds % 60000) // 1000), 0)
minutes = int(minutes)
seconds = int(seconds)
if len(str(seconds)) == 1:
seconds = "0" + str(seconds)
return f"{minutes}:{seconds}"
@staticmethod
def convert_to_milli(minute, second):
minute = int(minute) * 60000
second = int(second) * 1000
return minute + second
@staticmethod
async def pretty_convert(num) -> str:
if num >= (60 * 60):
hours = num // (60 * 60)
num %= (60 * 60)
mins = num // 60
num %= 60
return '{}:{}:{}'.format(hours, mins, num)
elif num > 60:
mins = num // 60
num %= 60
return '{}:{}'.format(mins, num)
else:
return '00:{}'.format(num)
@commands.command()
@commands.cooldown(1, 15, commands.BucketType.user)
async def join(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if ctx.author.voice and not player:
try:
await ctx.message.add_reaction('🎵')
except Exception:
pass
return await self.ensure_voice(ctx)
if ctx.author.voice and not player.is_connected:
try:
await ctx.message.add_reaction('🎵')
except Exception:
pass
return await self.ensure_voice(ctx)
try:
if player.is_connected:
return await ctx.message.reply(
                    content='I am already connected to %s!' % ctx.me.voice.channel.mention,
mention_author=False
)
except AttributeError:
if ctx.me.voice:
return await ctx.message.reply(
                    content='I am already connected to %s!' % ctx.me.voice.channel.mention,
mention_author=False
)
await ctx.message.reply(
            content='You need to be connected to a voice channel in order to use this command!',
mention_author=False
)
ctx.command.reset_cooldown(ctx)
@commands.command(aliases=['dc', 'leave'])
@commands.cooldown(1, 5, commands.BucketType.guild)
async def disconnect(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.is_connected:
return await ctx.message.reply(
content='I am not connected to any voice channels!',
mention_author=False)
if not ctx.author.voice or (player.is_connected and ctx.author.voice.channel.id != int(player.channel_id)):
return await ctx.message.reply(
                content='You are not connected to the same VC as me!',
mention_author=False)
channel = ctx.me.voice.channel
player.queue.clear()
await player.reset_equalizer()
await player.set_volume(100)
player.repeat = False
await player.stop()
await self.connect_to(ctx.guild.id, None)
await ctx.message.reply(
content='Successfully disconnected from **%s**.' % channel.name,
mention_author=False)
@commands.command(aliases=['p'])
@commands.cooldown(1, 5, commands.BucketType.user)
async def play(self, ctx, *, query: str):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
query = query.strip('<>')
if query.lower().startswith('soundcloud'):
query = f'scsearch:{query.lower().split("soundcloud")[-1]}'
elif not url_rx.match(query):
query = f'ytsearch:{query}'
results = await player.node.get_tracks(query)
if not results or not results['tracks']:
return await ctx.message.reply(
content='I could not find any **videos/songs** using your search query.',
mention_author=False)
embed = discord.Embed(color=discord.Color.red())
if results['loadType'] == 'PLAYLIST_LOADED':
tracks = results['tracks']
for track in tracks:
player.add(requester=ctx.author.id, track=track)
embed.title = 'Playlist Enqueued!'
embed.description = f'{results["playlistInfo"]["name"]} - {len(tracks)} tracks'
else:
track = results['tracks'][0]
embed.description = f'[{track["info"]["title"]}]({track["info"]["uri"]})'
track = lavalink.models.AudioTrack(track, ctx.author.id, recommended=True)
player.add(requester=ctx.author.id, track=track)
if not player.is_playing:
await player.play()
await ctx.message.reply(embed=embed, mention_author=False)
@commands.command(aliases=['sc'])
@commands.cooldown(1, 5, commands.BucketType.user)
async def soundcloud(self, ctx, *, query: str):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
query = query.strip('<>')
if not url_rx.match(query):
query = f'scsearch:{query}'
results = await player.node.get_tracks(query)
if not results or not results['tracks']:
return await ctx.message.reply(
content='I could not find any **songs** using your search query.',
mention_author=False)
embed = discord.Embed(color=discord.Color.red())
if results['loadType'] == 'PLAYLIST_LOADED':
tracks = results['tracks']
for track in tracks:
player.add(requester=ctx.author.id, track=track)
embed.title = 'Playlist Enqueued!'
embed.description = f'{results["playlistInfo"]["name"]} - {len(tracks)} tracks'
else:
track = results['tracks'][0]
embed.description = f'[{track["info"]["title"]}]({track["info"]["uri"]})'
track = lavalink.models.AudioTrack(track, ctx.author.id, recommended=True)
player.add(requester=ctx.author.id, track=track)
if not player.is_playing:
await player.play()
await ctx.message.reply(embed=embed, mention_author=False)
@commands.command()
@commands.cooldown(1, 10, commands.BucketType.user)
async def queue(self, ctx, page: str = '1'):
try:
page = int(page)
except ValueError:
return await ctx.send('The page must actually be a number!')
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
queue = player.queue
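        # Keep the currently playing track at the top of the queue listing.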
if not queue and player.is_playing:
queue.insert(0, player.current)
if not queue:
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
if player.queue[0] != player.current:
queue.insert(0, player.current)
embed = discord.Embed(title='Queue ({}/{})'.format(page, (len(queue) // 10) + 1), colour=discord.Colour.red())
try:
embed.description = '\n'.join(
f'`{(i + 1)}.` [{v.title}]({v.uri})' for i, v in enumerate(queue[((page * 10) - 10):(page * 10)]))
except IndexError:
return await ctx.send('This page number cannot be found!')
await ctx.send(embed=embed)
@commands.group(invoke_without_command=True, aliases=['looping'])
@commands.cooldown(1, 20, commands.BucketType.user)
async def loop(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.repeat:
return await ctx.message.reply(
content='Looping is currently **disabled** for this track.',
mention_author=False
)
await ctx.message.reply(
content='Looping is currently **enabled** for this track.',
mention_author=False
)
@loop.command()
@commands.cooldown(1, 20, commands.BucketType.user)
async def enable(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if player.repeat:
return await ctx.message.reply(
content='This track is already being looped!',
mention_author=False
)
player.repeat = True
try:
await ctx.message.add_reaction('\U0001f501')
except Exception:
pass
await ctx.message.reply(
content='Looping has been **enabled** for this track.',
mention_author=False
)
@loop.command()
@commands.cooldown(1, 20, commands.BucketType.user)
async def disable(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.repeat:
return await ctx.message.reply(
content='This track isn\'t currently looping.',
mention_author=False
)
player.repeat = False
try:
await ctx.message.add_reaction('\U0001f502')
except Exception:
pass
await ctx.message.reply(
content='Looping has been **disabled** for this track.',
mention_author=False
)
@commands.command()
@commands.cooldown(1, 30, commands.BucketType.user)
async def shuffle(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.queue:
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
queue = player.queue
random.shuffle(queue)
player.queue = queue
await ctx.message.reply(
content='Shuffled the queue for you.',
mention_author=False)
@commands.command()
@commands.cooldown(1, 10, commands.BucketType.user)
async def skip(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.is_playing:
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
await player.skip()
await ctx.message.reply(
content='Skipped the current track being played!',
mention_author=False)
@commands.command()
@commands.cooldown(1, 10, commands.BucketType.user)
async def pause(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.is_playing:
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
if not player.paused:
await player.set_pause(True)
await ctx.message.add_reaction('⏸️')
await ctx.message.reply(
content='Paused the current track.',
mention_author=False)
else:
await ctx.send('This track has already been paused.')
@commands.command()
@commands.cooldown(1, 10, commands.BucketType.user)
async def unpause(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.is_playing:
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
if player.paused:
await player.set_pause(False)
await ctx.message.add_reaction('⏯️')
await ctx.message.reply(
content='Resuming the current track!',
mention_author=False)
else:
await ctx.message.reply(
content='This track hasn\'t been paused.',
mention_author=False)
@commands.group(aliases=['vol'], invoke_without_command=True)
@commands.cooldown(1, 5, commands.BucketType.user)
async def volume(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
return await ctx.send('The current volume is set at **%s**' % int(player.volume / 10))
@volume.command()
@commands.cooldown(1, 5, commands.BucketType.user)
async def set(self, ctx, new_volume: str):
try:
volume = int(new_volume)
except ValueError:
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='Make sure the new volume is actually a number',
mention_author=False)
if volume not in range(0, 101):
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='Volume must be within the range of **0 - 100**',
mention_author=False)
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.is_playing:
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
await player.set_volume(volume * 10)
try:
await ctx.message.add_reaction('📶')
except Exception:
pass
await ctx.message.reply(
content='Set the volume to **%s**.' % volume,
mention_author=False)
@volume.command()
@commands.cooldown(1, 5, commands.BucketType.user)
async def reset(self, ctx):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.is_playing:
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
if player.volume == 100:
return await ctx.message.reply(
                content='Volume is already set to the default level!',
mention_author=False)
await player.set_volume(100)
await ctx.message.reply(
content='Volume has been set as normal again.',
mention_author=False)
""" FILTERS """
@commands.command(help='Adds a filter to your tracks.', aliases=['filters'])
@commands.cooldown(1, 10, commands.BucketType.user)
async def filter(self, ctx, *, _filter: str = None):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not _filter:
embed = discord.Embed(
colour=discord.Colour.red(),
title='List of filters',
description='It may take a few seconds for the filters to be applied to your tracks.\n\nTo apply a filter use `-Filter [FILTER-NAME]`. To remove a filter use\n`-Filter Remove` or re-use the same command as before.',
timestamp=ctx.message.created_at
)
current_eq = player.fetch('filter')
if not current_eq:
embed.set_footer(text='No filters added')
else:
embed.set_footer(text='Currently using %s filter' % current_eq.title())
embed.set_thumbnail(url=self.bot.user.avatar)
embed.add_field(name='Filters:', value="""\
Flat⠀⠀⠀⠀Boost⠀⠀⠀⠀Metal⠀⠀⠀⠀Piano
Bassboost⠀⠀⠀⠀Random
""")
return await ctx.send(embed=embed)
_filter = _filter.lower()
if _filter in ['remove', 'r']:
current_eq = player.fetch('filter')
if not current_eq:
return await ctx.send('There are currently no filters playing on your tracks!')
await player.reset_equalizer()
return await ctx.send('Removed filter **%s** from your tracks!' % current_eq.title())
if _filter == 'random':
embed = discord.Embed(title='The Random Filter', colour=discord.Colour.red())
raw_filter = await self.random_filter(random.choice([True, False, True, False, True, True]))
embed.add_field(name='Filter:', value='```\n{}\n```'.format(raw_filter))
            embed.description = 'If you like this filter join our [Support Server](https://discord.gg/Q5mFhUM) and suggest it to our community!'
await player.set_gains(*raw_filter)
player.store('filter', _filter)
return await ctx.send(embed=embed)
if _filter not in list(self.filter_maps.keys()):
return await ctx.send('This filter cannot be found!')
current_eq = player.fetch('filter')
if not current_eq:
await player.set_gains(*self.filter_maps.get(_filter))
player.store('filter', _filter)
return await ctx.send('Added the **%s** filter to your tracks.' % _filter.title())
if current_eq == _filter:
await player.reset_equalizer()
player.delete('filter')
return await ctx.send('Removed the filter from the tracks.')
if current_eq != _filter:
await player.set_gains(*self.filter_maps.get(_filter))
player.store('filter', _filter)
return await ctx.send('Swapped the **%s** filter with **%s**.' % (current_eq.title(), _filter.title()))
@commands.command()
@commands.cooldown(1, 10, commands.BucketType.user)
async def seek(self, ctx, *, time: str):
player = self.bot.lavalink.player_manager.get(ctx.guild.id)
if not player.is_playing:
ctx.command.reset_cooldown(ctx)
return await ctx.message.reply(
content='I am currently not playing anything!',
mention_author=False)
try:
seconds = to_seconds(time, strict=False)
except Exception:
return await ctx.send(
'Failed to parse the time, please use a valid format! And make sure it is not in negatives.')
as_milli = seconds * 1000
if as_milli > player.current.duration:
return await ctx.send('This time duration is larger than the song duration!')
await player.seek(as_milli)
return await ctx.message.reply(
            content='Moved to position **%s** of the track!' % await self.pretty_convert(int(seconds)),
mention_author=False
)
def setup(bot):
bot.add_cog(Music(bot))
| 36.789086
| 231
| 0.593313
|