blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bcb7f98761d8fb0eedcf0ed5c76dc0b2eee84856 | b1087edbb37be5f09111be592e2d986992f82cbe | /tencentcloud/mps/v20190612/mps_client.py | f8e32d9d40586295ae74bfaee676ecc33d77523c | [
"Apache-2.0"
] | permissive | cirvin4/tencentcloud-sdk-python | 05c816b436e607ff0025d1dd96dbf8af9ce19fbd | 6efe69a30bc304a4fc0f886c8cbe8593c198256a | refs/heads/master | 2020-11-24T14:35:13.400073 | 2019-12-15T13:47:08 | 2019-12-15T13:47:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73,386 | py | # -*- coding: utf8 -*-
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.common.abstract_client import AbstractClient
from tencentcloud.mps.v20190612 import models
class MpsClient(AbstractClient):
_apiVersion = '2019-06-12'
_endpoint = 'mps.tencentcloudapi.com'
def CreateAIAnalysisTemplate(self, request):
    """Create a custom content-analysis template. Maximum number: 50.

    :param request: Request structure for the CreateAIAnalysisTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateAIAnalysisTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateAIAnalysisTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateAIAnalysisTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateAIAnalysisTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute, so the
            # old ``e.message`` access raised AttributeError and masked the
            # real error. Fall back to str(e) when ``message`` is absent.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateAIRecognitionTemplate(self, request):
    """Create a custom content-recognition template. Maximum number: 50.

    :param request: Request structure for the CreateAIRecognitionTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateAIRecognitionTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateAIRecognitionTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateAIRecognitionTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateAIRecognitionTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateAnimatedGraphicsTemplate(self, request):
    """Create a custom animated-image (video-to-GIF) template. Maximum number: 16.

    :param request: Request structure for the CreateAnimatedGraphicsTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateAnimatedGraphicsTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateAnimatedGraphicsTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateAnimatedGraphicsTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateAnimatedGraphicsTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateContentReviewTemplate(self, request):
    """Create a custom content-review template. Maximum number: 50.

    :param request: Request structure for the CreateContentReviewTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateContentReviewTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateContentReviewTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateContentReviewTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateContentReviewTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateImageSpriteTemplate(self, request):
    """Create a custom image-sprite template. Maximum number: 16.

    :param request: Request structure for the CreateImageSpriteTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateImageSpriteTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateImageSpriteTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateImageSpriteTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateImageSpriteTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreatePersonSample(self, request):
    """Create a person sample, used by face recognition and similar technologies
    for video processing such as content recognition and content review.

    :param request: Request structure for the CreatePersonSample call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreatePersonSampleRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreatePersonSampleResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreatePersonSample", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreatePersonSampleResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateSampleSnapshotTemplate(self, request):
    """Create a custom sampled-screenshot template. Maximum number: 16.

    :param request: Request structure for the CreateSampleSnapshotTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateSampleSnapshotTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateSampleSnapshotTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateSampleSnapshotTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateSampleSnapshotTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateSnapshotByTimeOffsetTemplate(self, request):
    """Create a custom time-point screenshot template. Maximum number: 16.

    :param request: Request structure for the CreateSnapshotByTimeOffsetTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateSnapshotByTimeOffsetTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateSnapshotByTimeOffsetTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateSnapshotByTimeOffsetTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateSnapshotByTimeOffsetTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateTranscodeTemplate(self, request):
    """Create a custom transcoding template. Maximum number: 1000.

    :param request: Request structure for the CreateTranscodeTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateTranscodeTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateTranscodeTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateTranscodeTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateTranscodeTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateWatermarkTemplate(self, request):
    """Create a custom watermark template. Maximum number: 1000.

    :param request: Request structure for the CreateWatermarkTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateWatermarkTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateWatermarkTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateWatermarkTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateWatermarkTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateWordSamples(self, request):
    """Batch-create keyword samples, used by OCR and ASR technologies for video
    processing such as content review and content recognition.

    :param request: Request structure for the CreateWordSamples call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateWordSamplesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateWordSamplesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateWordSamples", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateWordSamplesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def CreateWorkflow(self, request):
    """Set processing rules for media files uploaded under a specified COS
    bucket directory, including:
    1. Video transcoding (with watermark);
    2. Video-to-animated-image conversion;
    3. Screenshots at specified time points;
    4. Sampled screenshots;
    5. Image-sprite screenshots;
    6. Adaptive-bitrate stream conversion;
    7. Intelligent content review (porn, terrorism, politics detection);
    8. Intelligent content analysis (tags, categories, covers, frame tags);
    9. Intelligent content recognition (faces, full text, text keywords,
       full speech, speech keywords).
    Note: a newly created workflow is disabled and must be enabled manually.

    :param request: Request structure for the CreateWorkflow call.
    :type request: :class:`tencentcloud.mps.v20190612.models.CreateWorkflowRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.CreateWorkflowResponse`
    """
    try:
        params = request._serialize()
        body = self.call("CreateWorkflow", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.CreateWorkflowResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteAIAnalysisTemplate(self, request):
    """Delete a custom content-analysis template.
    Note: template IDs below 10000 are system presets and cannot be deleted.

    :param request: Request structure for the DeleteAIAnalysisTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteAIAnalysisTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteAIAnalysisTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteAIAnalysisTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteAIAnalysisTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteAIRecognitionTemplate(self, request):
    """Delete a custom content-recognition template.

    :param request: Request structure for the DeleteAIRecognitionTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteAIRecognitionTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteAIRecognitionTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteAIRecognitionTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteAIRecognitionTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteAnimatedGraphicsTemplate(self, request):
    """Delete a custom animated-image (video-to-GIF) template.

    :param request: Request structure for the DeleteAnimatedGraphicsTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteAnimatedGraphicsTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteAnimatedGraphicsTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteAnimatedGraphicsTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteAnimatedGraphicsTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteContentReviewTemplate(self, request):
    """Delete a custom content-review template.

    :param request: Request structure for the DeleteContentReviewTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteContentReviewTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteContentReviewTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteContentReviewTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteContentReviewTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteImageSpriteTemplate(self, request):
    """Delete an image-sprite template.

    :param request: Request structure for the DeleteImageSpriteTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteImageSpriteTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteImageSpriteTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteImageSpriteTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteImageSpriteTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeletePersonSample(self, request):
    """Delete a person sample by its person ID.

    :param request: Request structure for the DeletePersonSample call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeletePersonSampleRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeletePersonSampleResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeletePersonSample", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeletePersonSampleResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteSampleSnapshotTemplate(self, request):
    """Delete a custom sampled-screenshot template.

    :param request: Request structure for the DeleteSampleSnapshotTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteSampleSnapshotTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteSampleSnapshotTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteSampleSnapshotTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteSampleSnapshotTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteSnapshotByTimeOffsetTemplate(self, request):
    """Delete a custom time-point screenshot template.

    :param request: Request structure for the DeleteSnapshotByTimeOffsetTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteSnapshotByTimeOffsetTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteSnapshotByTimeOffsetTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteSnapshotByTimeOffsetTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteSnapshotByTimeOffsetTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteTranscodeTemplate(self, request):
    """Delete a custom transcoding template.

    :param request: Request structure for the DeleteTranscodeTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteTranscodeTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteTranscodeTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteTranscodeTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteTranscodeTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteWatermarkTemplate(self, request):
    """Delete a custom watermark template.

    :param request: Request structure for the DeleteWatermarkTemplate call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteWatermarkTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteWatermarkTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteWatermarkTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteWatermarkTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteWordSamples(self, request):
    """Batch-delete keyword samples.

    :param request: Request structure for the DeleteWordSamples call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteWordSamplesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteWordSamplesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteWordSamples", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteWordSamplesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DeleteWorkflow(self, request):
    """Delete a workflow. An enabled workflow must be disabled before it can
    be deleted.

    :param request: Request structure for the DeleteWorkflow call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DeleteWorkflowRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DeleteWorkflowResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DeleteWorkflow", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DeleteWorkflowResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeAIAnalysisTemplates(self, request):
    """Get the list of content-analysis template details by unique template
    ID. The result contains all matching user-defined and system-preset
    video content-analysis templates.

    :param request: Request structure for the DescribeAIAnalysisTemplates call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeAIAnalysisTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeAIAnalysisTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeAIAnalysisTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeAIAnalysisTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeAIRecognitionTemplates(self, request):
    """Get the list of content-recognition template details by unique template
    ID. The result contains all matching user-defined and system-preset
    video content-recognition templates.

    :param request: Request structure for the DescribeAIRecognitionTemplates call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeAIRecognitionTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeAIRecognitionTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeAIRecognitionTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeAIRecognitionTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeAnimatedGraphicsTemplates(self, request):
    """Query the list of animated-image templates, with filtering and
    pagination support.

    :param request: Request structure for the DescribeAnimatedGraphicsTemplates call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeAnimatedGraphicsTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeAnimatedGraphicsTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeAnimatedGraphicsTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeAnimatedGraphicsTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeContentReviewTemplates(self, request):
    """Get the list of content-review template details by unique template ID.
    The result contains all matching user-defined and system-preset
    content-review templates.

    :param request: Request structure for the DescribeContentReviewTemplates call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeContentReviewTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeContentReviewTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeContentReviewTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeContentReviewTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeImageSpriteTemplates(self, request):
    """Query image-sprite templates, with filtering and pagination support.

    :param request: Request structure for the DescribeImageSpriteTemplates call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeImageSpriteTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeImageSpriteTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeImageSpriteTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeImageSpriteTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribePersonSamples(self, request):
    """Query person-sample information, with pagination and filtering by
    person ID, name, or tag.

    :param request: Request structure for the DescribePersonSamples call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribePersonSamplesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribePersonSamplesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribePersonSamples", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribePersonSamplesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeSampleSnapshotTemplates(self, request):
    """Query sampled-screenshot templates, with filtering and pagination
    support.

    :param request: Request structure for the DescribeSampleSnapshotTemplates call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeSampleSnapshotTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeSampleSnapshotTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeSampleSnapshotTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeSampleSnapshotTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeSnapshotByTimeOffsetTemplates(self, request):
    """Query time-point screenshot templates, with filtering and pagination
    support.

    :param request: Request structure for the DescribeSnapshotByTimeOffsetTemplates call.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeSnapshotByTimeOffsetTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeSnapshotByTimeOffsetTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeSnapshotByTimeOffsetTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeSnapshotByTimeOffsetTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Fix: Python 3 exceptions have no ``message`` attribute; the old
            # ``e.message`` access raised AttributeError and hid the real error.
            message = getattr(e, "message", str(e))
            raise TencentCloudSDKException(message, message)
def DescribeTaskDetail(self, request):
    """Query detailed execution status and result of a task by task ID
    (only tasks submitted within the last 3 days can be queried).

    :param request: Request instance for DescribeTaskDetail.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeTaskDetailRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeTaskDetailResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeTaskDetail", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeTaskDetailResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeTasks(self, request):
    """Query the task list.

    * When the list is long, a single call cannot fetch it all; page
      through with the ScrollToken parameter.
    * Only tasks from the last three days (72 hours) are returned.

    :param request: Request instance for DescribeTasks.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeTasksRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeTasksResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeTasks", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeTasksResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeTranscodeTemplates(self, request):
    """Get transcoding template details by unique template ID. The result
    includes all matching user-defined templates as well as system preset
    transcoding templates.

    :param request: Request instance for DescribeTranscodeTemplates.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeTranscodeTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeTranscodeTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeTranscodeTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeTranscodeTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeWatermarkTemplates(self, request):
    """Query user-defined watermark templates, with filtering and paging.

    :param request: Request instance for DescribeWatermarkTemplates.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeWatermarkTemplatesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeWatermarkTemplatesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeWatermarkTemplates", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeWatermarkTemplatesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeWordSamples(self, request):
    """Page through keyword sample information filtered by usage
    scenario, keyword and tag.

    :param request: Request instance for DescribeWordSamples.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeWordSamplesRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeWordSamplesResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeWordSamples", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeWordSamplesResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def DescribeWorkflows(self, request):
    """Get workflow details by workflow ID.

    :param request: Request instance for DescribeWorkflows.
    :type request: :class:`tencentcloud.mps.v20190612.models.DescribeWorkflowsRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DescribeWorkflowsResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DescribeWorkflows", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DescribeWorkflowsResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def DisableWorkflow(self, request):
    """Disable a workflow.

    :param request: Request instance for DisableWorkflow.
    :type request: :class:`tencentcloud.mps.v20190612.models.DisableWorkflowRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.DisableWorkflowResponse`
    """
    try:
        params = request._serialize()
        body = self.call("DisableWorkflow", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.DisableWorkflowResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def EnableWorkflow(self, request):
    """Enable a workflow.

    :param request: Request instance for EnableWorkflow.
    :type request: :class:`tencentcloud.mps.v20190612.models.EnableWorkflowRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.EnableWorkflowResponse`
    """
    try:
        params = request._serialize()
        body = self.call("EnableWorkflow", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.EnableWorkflowResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyAIAnalysisTemplate(self, request):
    """Modify a user-defined content-analysis template.

    Note: template IDs below 10000 are system presets and cannot be
    modified.

    :param request: Request instance for ModifyAIAnalysisTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyAIAnalysisTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyAIAnalysisTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyAIAnalysisTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyAIAnalysisTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyAIRecognitionTemplate(self, request):
    """Modify a user-defined content-recognition template.

    :param request: Request instance for ModifyAIRecognitionTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyAIRecognitionTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyAIRecognitionTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyAIRecognitionTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyAIRecognitionTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyAnimatedGraphicsTemplate(self, request):
    """Modify a user-defined animated-image template.

    :param request: Request instance for ModifyAnimatedGraphicsTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyAnimatedGraphicsTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyAnimatedGraphicsTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyAnimatedGraphicsTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyAnimatedGraphicsTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyContentReviewTemplate(self, request):
    """Modify a user-defined content-review template.

    :param request: Request instance for ModifyContentReviewTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyContentReviewTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyContentReviewTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyContentReviewTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyContentReviewTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyImageSpriteTemplate(self, request):
    """Modify a user-defined image-sprite template.

    :param request: Request instance for ModifyImageSpriteTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyImageSpriteTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyImageSpriteTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyImageSpriteTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyImageSpriteTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyPersonSample(self, request):
    """Modify person-sample information by person ID: name/description
    edits, plus add/delete/reset operations on faces and tags. A face
    delete must leave at least one image; use reset otherwise.

    :param request: Request instance for ModifyPersonSample.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyPersonSampleRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyPersonSampleResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyPersonSample", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyPersonSampleResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifySampleSnapshotTemplate(self, request):
    """Modify a user-defined sampled-screenshot template.

    :param request: Request instance for ModifySampleSnapshotTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifySampleSnapshotTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifySampleSnapshotTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifySampleSnapshotTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifySampleSnapshotTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifySnapshotByTimeOffsetTemplate(self, request):
    """Modify a user-defined time-point screenshot template.

    :param request: Request instance for ModifySnapshotByTimeOffsetTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifySnapshotByTimeOffsetTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifySnapshotByTimeOffsetTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifySnapshotByTimeOffsetTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifySnapshotByTimeOffsetTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyTranscodeTemplate(self, request):
    """Modify user-defined transcoding template information.

    :param request: Request instance for ModifyTranscodeTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyTranscodeTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyTranscodeTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyTranscodeTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyTranscodeTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyWatermarkTemplate(self, request):
    """Modify a user-defined watermark template. The watermark type
    cannot be changed.

    :param request: Request instance for ModifyWatermarkTemplate.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyWatermarkTemplateRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyWatermarkTemplateResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyWatermarkTemplate", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyWatermarkTemplateResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ModifyWordSample(self, request):
    """Modify a keyword's usage scenarios and tags. The keyword itself
    is immutable; delete and recreate it to change the text.

    :param request: Request instance for ModifyWordSample.
    :type request: :class:`tencentcloud.mps.v20190612.models.ModifyWordSampleRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ModifyWordSampleResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ModifyWordSample", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ModifyWordSampleResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ParseLiveStreamProcessNotification(self, request):
    """Parse an MPS live-stream processing event notification from the
    msgBody field of a message fetched from CMQ.

    This interface makes no network call; it exists to drive generation
    of per-language SDK helpers for parsing event notifications.

    :param request: Request instance for ParseLiveStreamProcessNotification.
    :type request: :class:`tencentcloud.mps.v20190612.models.ParseLiveStreamProcessNotificationRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ParseLiveStreamProcessNotificationResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ParseLiveStreamProcessNotification", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ParseLiveStreamProcessNotificationResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ParseNotification(self, request):
    """Parse an MPS event notification from the msgBody field of a
    message fetched from CMQ.

    This interface makes no network call; it exists to drive generation
    of per-language SDK helpers for parsing event notifications.

    :param request: Request instance for ParseNotification.
    :type request: :class:`tencentcloud.mps.v20190612.models.ParseNotificationRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ParseNotificationResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ParseNotification", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ParseNotificationResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ProcessLiveStream(self, request):
    """Start processing tasks on a live stream, including intelligent
    content review (porn/politics/violence in frames, porn in audio).

    Processing events are written in real time to the user-specified CMQ
    queue; the caller fetches results from CMQ. Output files produced
    during processing are written to the specified target storage.

    :param request: Request instance for ProcessLiveStream.
    :type request: :class:`tencentcloud.mps.v20190612.models.ProcessLiveStreamRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ProcessLiveStreamResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ProcessLiveStream", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ProcessLiveStreamResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ProcessMedia(self, request):
    """Start processing tasks on a media file in COS, including:
    1. transcoding (with watermark); 2. animated-image conversion;
    3. time-point screenshots; 4. sampled screenshots; 5. image sprites;
    6. adaptive bitrate streaming; 7. intelligent content review;
    8. intelligent content analysis (tags, categories, covers,
    frame tags); 9. intelligent content recognition (faces, full text,
    text keywords, full speech, speech keywords).

    :param request: Request instance for ProcessMedia.
    :type request: :class:`tencentcloud.mps.v20190612.models.ProcessMediaRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ProcessMediaResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ProcessMedia", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ProcessMediaResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
def ResetWorkflow(self, request):
    """Reconfigure an existing workflow that is currently disabled.

    :param request: Request instance for ResetWorkflow.
    :type request: :class:`tencentcloud.mps.v20190612.models.ResetWorkflowRequest`
    :rtype: :class:`tencentcloud.mps.v20190612.models.ResetWorkflowResponse`
    """
    try:
        params = request._serialize()
        body = self.call("ResetWorkflow", params)
        response = json.loads(body)
        if "Error" not in response["Response"]:
            model = models.ResetWorkflowResponse()
            model._deserialize(response["Response"])
            return model
        else:
            code = response["Response"]["Error"]["Code"]
            message = response["Response"]["Error"]["Message"]
            reqid = response["Response"]["RequestId"]
            raise TencentCloudSDKException(code, message, reqid)
    except Exception as e:
        if isinstance(e, TencentCloudSDKException):
            raise
        else:
            # Python 3 exceptions have no ``message`` attribute; the original
            # ``e.message`` raised AttributeError and hid the real error.
            raise TencentCloudSDKException(type(e).__name__, str(e))
"tencentcloudapi@tencent.com"
] | tencentcloudapi@tencent.com |
e383ea3229f57026689c505f94d5e91c01c69485 | 0f6db051f895b5f4621610c9f84eb678e83629bf | /src/learn/train_peak_model.py | 1ce3a6d64474cb7d8a8f37b07a708d41f97ffe86 | [
"MIT"
] | permissive | stonewell/learn-curve | 313b18c01b0acc208390bf8589c674c7e758cdba | ae787e2f84d3b91f59257b500b11b1dd8904430e | refs/heads/main | 2022-05-25T10:38:44.238297 | 2022-04-01T17:04:12 | 2022-04-01T17:04:12 | 61,493,486 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,335 | py | import os
import sys
import argparse
import datetime
import logging
import json
import pathlib
import pandas as pd
import numpy as np
from learn.peak_analyze import PeakAnalyze
sys.dont_write_bytecode = True
def valid_date(s):
    """argparse ``type=`` callable: parse a YYYYMMDD string into a date.

    :raises argparse.ArgumentTypeError: if *s* is not a valid YYYYMMDD date.
    """
    try:
        return datetime.datetime.strptime(s, "%Y%m%d").date()
    except ValueError as exc:
        # Bare ``except:`` would also swallow KeyboardInterrupt etc.;
        # only a parse failure should become an argument error.
        msg = "Not a valid date: '{0}'.".format(s)
        raise argparse.ArgumentTypeError(msg) from exc
def parse_arguments():
    """Build the CLI parser and parse ``sys.argv``.

    Returns an ``argparse.Namespace`` with: debug (count), interval
    (int, default 5), output/input/validate_input (``pathlib.Path``).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="count", default=0,
                        help="print debug information")
    parser.add_argument("-v", "--version", action="version",
                        version='%(prog)s 1.0')
    parser.add_argument("-n", "--interval", default=5, type=int,
                        help='days interval for training')
    parser.add_argument('-o', "--output", required=True, type=pathlib.Path,
                        metavar='<output file>',
                        help="save generated model to the file")
    parser.add_argument('-i', "--input", required=True, type=pathlib.Path,
                        metavar='<training data directory>',
                        help="training data directory for peak analysis")
    parser.add_argument('-t', "--validate_input", required=True,
                        type=pathlib.Path,
                        metavar='<training validating data directory>',
                        help="training validating data directory for peak analysis")
    return parser.parse_args()
def validate_args(args):
    """Fail fast on invalid CLI arguments.

    :raises argparse.ArgumentTypeError: if the interval is non-positive or
        either data directory does not exist.
    """
    checks = (
        (args.interval > 0,
         'invalid training interval:{}'.format(args.interval)),
        (args.input.is_dir(),
         'invalid training data directory:{}'.format(args.input)),
        (args.validate_input.is_dir(),
         'invalid training validation data directory:{}'.format(args.validate_input)),
    )
    for ok, message in checks:
        if not ok:
            raise argparse.ArgumentTypeError(message)
def main():
    """Entry point: parse and validate CLI args, load the training and
    validation datasets, then train the model."""
    args = parse_arguments()
    # Root-logger level follows the -d/--debug flag.
    level = logging.DEBUG if args.debug > 0 else logging.INFO
    logging.getLogger('').setLevel(level)
    logging.debug('debug level:{}'.format(args.debug))
    validate_args(args)
    features, label = load_features_label(args.input, args)
    v_features, v_label = load_features_label(args.validate_input, args)
    train_model(features, label, v_features, v_label, args)
def load_features_label(input_dir, args):
    """Load every file in *input_dir* and concatenate all feature windows
    and one-hot labels into two flat lists.

    Files with fewer rows than ``args.interval`` are skipped with a warning.
    """
    all_features = []
    all_labels = []
    for training_file in input_dir.iterdir():
        training_data = load_data(training_file, args)
        if len(training_data) < args.interval:
            logging.warning('{} too few training data, need at least {}.'.format(
                training_file, args.interval))
            continue
        file_features, file_labels = build_features_and_label(training_data, args)
        all_features.extend(file_features)
        all_labels.extend(file_labels)
    return all_features, all_labels
def load_data(training_file, args):
    """Read one CSV training file (first column is the index) and replace
    missing values with 0. *args* is accepted for interface symmetry but
    is not used here."""
    frame = pd.read_csv(training_file, index_col=0)
    return frame.fillna(0)
def build_features_and_label(training_data, args):
    """Turn one per-symbol DataFrame into rolling feature windows and
    one-hot labels.

    Every column except the last is a feature; the 'trade' column is the
    label source. For each window of ``args.interval`` consecutive rows
    the label is the 'trade' value of the row right after the window.

    Returns ``(features, labels)`` where ``features`` is a list of
    interval-long row lists and ``labels`` is a list of 3-element one-hot
    vectors: index 0 = no signal (0), index 1 = value 50, index 2 = any
    other non-zero value (presumably the opposite trade signal -- confirm
    against the label generator).
    """
    features_data = training_data[training_data.columns[:-1]].values.tolist()
    label_data = training_data['trade'].tolist()
    features = []
    label = []
    for index in range(0, len(features_data) - args.interval, 1):
        entries = features_data[index:index + args.interval]
        features.append(entries)
        l = label_data[index + args.interval]
        # If the row after the window has no signal but an immediate
        # neighbour (one row before or after) does, adopt the neighbour's
        # label so near-misses around a peak still produce training signal.
        check_range = 1
        if l == 0:
            for t in range(check_range):
                try:
                    l1 = label_data[index + args.interval - t - 1]
                except IndexError:
                    # Narrowed from a bare ``except:`` -- only an
                    # out-of-range lookup should default to "no signal".
                    l1 = 0
                try:
                    l2 = label_data[index + args.interval + t + 1]
                except IndexError:
                    l2 = 0
                l = l1 if l1 != 0 else l2 if l2 != 0 else 0
                if l != 0:
                    break
        label.append(l)
    # One-hot encode: 0 -> column 0, 50 -> column 1, anything else -> column 2.
    label_array = np.zeros((len(label), 3), dtype=np.int8)
    for label_index, l in enumerate(label):
        l_index = l if l == 0 else 1 if l == 50 else 2
        label_array[label_index, l_index] = 1
    return features, label_array.tolist()
def train_model(features, label, v_features, v_label, args):
    """Train a model on the prepared data.

    Currently a thin dispatcher: the Keras RNN backend is the only
    implementation.
    """
    train_model_keras_rnn(features, label, v_features, v_label, args)
def train_model_keras_rnn(features, label,
                          v_features, v_label,
                          args):
    """Train a stacked-LSTM classifier and checkpoint the best model.

    features/label: training windows and one-hot labels from
    build_features_and_label; v_features/v_label: validation split;
    args: needs ``interval`` (window length) and ``output`` (checkpoint
    path). Keras/TensorFlow are imported lazily so the rest of the
    script works without them installed.
    """
    # Lazy imports: only pulled in when training actually runs.
    from keras.models import Sequential
    from keras.layers import LSTM, Dense, Dropout
    from keras import Input
    from tensorflow.keras import optimizers
    model = Sequential()
    # Input: (interval timesteps) x (per-row feature count).
    model.add(Input(shape=(args.interval, len(features[0][0]))))
    # use default activation to use cuDNN kernel
    model.add(LSTM(512,
                   return_sequences=True,
                   dropout=.1,
                   #activation='relu'
                   ))
    model.add(LSTM(128,
                   return_sequences=False,
                   dropout=.1,
                   #activation='relu'
                   ))
    model.add(Dense(64,
                    activation='relu'))
    model.add(Dropout(0.5))
    # 3-way softmax matching the one-hot label columns.
    model.add(Dense(3,
                    activation='softmax'))
    optimizer = optimizers.Adam()
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  #loss='mean_squared_error',
                  metrics=['accuracy'])
    model.summary()
    from keras.callbacks import EarlyStopping, ModelCheckpoint
    # Create callbacks
    # Stop after 5 epochs without val_loss improvement; keep only the
    # best full model at args.output.
    callbacks = [
        EarlyStopping(monitor='val_loss', patience=5),
        ModelCheckpoint(args.output,
                        save_best_only=True,
                        save_weights_only=False)
    ]
    # NOTE(review): ``history`` is captured but never used downstream.
    history = model.fit(features,
                        label,
                        epochs=150,
                        callbacks=callbacks,
                        validation_data=(v_features, v_label))
# Script entry point when executed directly (not imported).
if __name__ == '__main__':
    main()
| [
"jingnan.si@gmail.com"
] | jingnan.si@gmail.com |
23bf6fd9231c759ca6394cdab98810466382cf77 | 82a9077bcb5a90d88e0a8be7f8627af4f0844434 | /google-cloud-sdk/lib/tests/unit/surface/anthos/create_login_config_test.py | 455176b1ecbce0ed2d1131aa979b3d28b42a7223 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | piotradamczyk5/gcloud_cli | 1ae2553595e569fad6ce84af62b91a7ee5489017 | 384ece11040caadcd64d51da74e0b8491dd22ca3 | refs/heads/master | 2023-01-01T23:00:27.858583 | 2020-10-21T04:21:23 | 2020-10-21T04:21:23 | 290,238,061 | 0 | 0 | null | 2020-10-19T16:43:36 | 2020-08-25T14:31:00 | Python | UTF-8 | Python | false | false | 2,107 | py | # -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""anthos auth create login config tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from tests.lib import test_case
from tests.lib.surface.anthos import test_base as anthos_test_base
class CreateLoginConfigTest(anthos_test_base.AuthUnitTestBase):
    """Unit tests for `gcloud anthos create-login-config`."""

    # Environment the wrapped auth binary is always expected to receive.
    _EXPECTED_ENV = {'COBRA_SILENCE_USAGE': 'true', 'GCLOUD_AUTH_PLUGIN': 'true'}

    def testCreateLoginConfigWithDefaults(self):
        """With only --kubeconfig, the binary gets just that flag."""
        self.Run('anthos create-login-config --kubeconfig my-config.yaml')
        expected_args = [
            anthos_test_base._MOCK_ANTHOS_AUTH_BINARY,
            'create-login-config',
            '--kubeconfig',
            'my-config.yaml',
        ]
        self.AssertValidBinaryCall(env=dict(self._EXPECTED_ENV),
                                   command_args=expected_args)
        self.AssertErrContains('Configuring Anthos authentication')

    def testCreateLoginConfigExplicit(self):
        """--output and --merge-from are forwarded to the auth binary."""
        self.Run('anthos create-login-config --kubeconfig my-config.yaml '
                 '--output my_output.yaml --merge-from orig-config.yaml')
        expected_args = [
            anthos_test_base._MOCK_ANTHOS_AUTH_BINARY,
            'create-login-config',
            '--kubeconfig',
            'my-config.yaml',
            '--output',
            'my_output.yaml',
            '--merge-from',
            'orig-config.yaml',
        ]
        self.AssertValidBinaryCall(env=dict(self._EXPECTED_ENV),
                                   command_args=expected_args)
        self.AssertErrContains('Configuring Anthos authentication')
# Allow running this test module directly via the gcloud test harness.
if __name__ == '__main__':
    test_case.main()
| [
"code@bootstraponline.com"
] | code@bootstraponline.com |
4a4a579f82001e30dd5e40cfb88a7d41554d1279 | 74e3bf0160007fb1e6908879fe743b6cd74fd379 | /python/day09/weavo.py | b6dd802b26541e558a0d2cfaf6009b7dd593dce8 | [] | no_license | edutak/TIL-2 | 7a5586081af9172b1143dd4aaddef8954fe9fe81 | 4f736019883c5153b2afeb7b490014b9bd569b18 | refs/heads/master | 2023-05-08T16:48:26.283999 | 2021-06-04T06:40:39 | 2021-06-04T06:40:39 | 373,745,204 | 0 | 0 | null | 2021-06-04T06:37:17 | 2021-06-04T06:37:16 | null | UTF-8 | Python | false | false | 776 | py | class WeadbVO:
def __init__(self,city,province,tmn,tmx,date):
    """Value object for one weather observation.

    city/province: location names; tmn/tmx: daily minimum/maximum
    temperature as floats (unit not specified here -- presumably
    Celsius, confirm with the data source); date: observation date.
    """
    self.__city = city;
    self.__province = province;
    self.__tmn = tmn;
    self.__tmx = tmx;
    self.__date = date;
def __str__(self):
return '%s, %s, %.2f %.2f %s' % (self.__city,self.__province,self.__tmn,self.__tmx,self.__date);
def getCity(self):
return self.__city;
def getProvince(self):
return self.__province;
def setProvince(self, province):
self.__province = province;
def getTmn(self):
return self.__tmn;
def setTmn(self, tmn):
self.__tmn = tmn;
def getTmx(self):
return self.__tmx;
def setTmx(self, tmx):
self.__tmx = tmx;
def getDate(self):
return self.__date;
| [
"cjdauddl93@gmail.com"
] | cjdauddl93@gmail.com |
63e1858fbbcea79a393de7f9e6c4504f40e5fe1e | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /tests/components/switchbot/test_config_flow.py | 851746582798ddfc0475281d02062cee6b9971d0 | [
"Apache-2.0"
] | permissive | home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | Python | UTF-8 | Python | false | false | 26,067 | py | """Test the switchbot config flow."""
from unittest.mock import patch
from switchbot import SwitchbotAccountConnectionError, SwitchbotAuthenticationError
from homeassistant.components.switchbot.const import (
CONF_ENCRYPTION_KEY,
CONF_KEY_ID,
CONF_RETRY_COUNT,
)
from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER
from homeassistant.const import (
CONF_ADDRESS,
CONF_NAME,
CONF_PASSWORD,
CONF_SENSOR_TYPE,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from . import (
NOT_SWITCHBOT_INFO,
USER_INPUT,
WOCURTAIN_SERVICE_INFO,
WOHAND_ENCRYPTED_SERVICE_INFO,
WOHAND_SERVICE_ALT_ADDRESS_INFO,
WOHAND_SERVICE_INFO,
WOHAND_SERVICE_INFO_NOT_CONNECTABLE,
WOLOCK_SERVICE_INFO,
WOSENSORTH_SERVICE_INFO,
init_integration,
patch_async_setup_entry,
)
from tests.common import MockConfigEntry
DOMAIN = "switchbot"
async def test_bluetooth_discovery(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth with a valid device."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_BLUETOOTH},
data=WOHAND_SERVICE_INFO,
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "confirm"
with patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Bot EEFF"
assert result["data"] == {
CONF_ADDRESS: "AA:BB:CC:DD:EE:FF",
CONF_SENSOR_TYPE: "bot",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_bluetooth_discovery_requires_password(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth with a valid device that needs a password."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_BLUETOOTH},
data=WOHAND_ENCRYPTED_SERVICE_INFO,
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "password"
with patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "abc123"},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Bot 923B"
assert result["data"] == {
CONF_ADDRESS: "798A8547-2A3D-C609-55FF-73FA824B923B",
CONF_SENSOR_TYPE: "bot",
CONF_PASSWORD: "abc123",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_bluetooth_discovery_lock_key(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth with a lock."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_BLUETOOTH},
data=WOLOCK_SERVICE_INFO,
)
assert result["type"] == FlowResultType.MENU
assert result["step_id"] == "lock_choose_method"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"next_step_id": "lock_key"}
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_key"
assert result["errors"] == {}
with patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key",
return_value=False,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KEY_ID: "",
CONF_ENCRYPTION_KEY: "",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_key"
assert result["errors"] == {"base": "encryption_key_invalid"}
with patch_async_setup_entry() as mock_setup_entry, patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key",
return_value=True,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Lock EEFF"
assert result["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
CONF_SENSOR_TYPE: "lock",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_bluetooth_discovery_already_setup(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth with a valid device when already setup."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_SENSOR_TYPE: "bot",
},
unique_id="aabbccddeeff",
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_BLUETOOTH},
data=WOHAND_SERVICE_INFO,
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "already_configured"
async def test_async_step_bluetooth_not_switchbot(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth not switchbot."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_BLUETOOTH},
data=NOT_SWITCHBOT_INFO,
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "not_supported"
async def test_async_step_bluetooth_not_connectable(hass: HomeAssistant) -> None:
"""Test discovery via bluetooth and its not connectable switchbot."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_BLUETOOTH},
data=WOHAND_SERVICE_INFO_NOT_CONNECTABLE,
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "not_supported"
async def test_user_setup_wohand(hass: HomeAssistant) -> None:
"""Test the user initiated form with password and valid mac."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOHAND_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "confirm"
assert result["errors"] is None
with patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Bot EEFF"
assert result["data"] == {
CONF_ADDRESS: "AA:BB:CC:DD:EE:FF",
CONF_SENSOR_TYPE: "bot",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_wohand_already_configured(hass: HomeAssistant) -> None:
"""Test the user initiated form with password and valid mac."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_SENSOR_TYPE: "bot",
},
unique_id="aabbccddeeff",
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOHAND_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "no_devices_found"
async def test_user_setup_wocurtain(hass: HomeAssistant) -> None:
"""Test the user initiated form with password and valid mac."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOCURTAIN_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "confirm"
assert result["errors"] is None
with patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Curtain EEFF"
assert result["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_SENSOR_TYPE: "curtain",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_wocurtain_or_bot(hass: HomeAssistant) -> None:
"""Test the user initiated form with valid address."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[
NOT_SWITCHBOT_INFO,
WOCURTAIN_SERVICE_INFO,
WOHAND_SERVICE_ALT_ADDRESS_INFO,
WOHAND_SERVICE_INFO_NOT_CONNECTABLE,
],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "user"
assert result["errors"] == {}
with patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
USER_INPUT,
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Curtain EEFF"
assert result["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_SENSOR_TYPE: "curtain",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_wocurtain_or_bot_with_password(hass: HomeAssistant) -> None:
"""Test the user initiated form and valid address and a bot with a password."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[
WOCURTAIN_SERVICE_INFO,
WOHAND_ENCRYPTED_SERVICE_INFO,
WOHAND_SERVICE_INFO_NOT_CONNECTABLE,
],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "user"
assert result["errors"] == {}
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_ADDRESS: "798A8547-2A3D-C609-55FF-73FA824B923B"},
)
assert result2["type"] == FlowResultType.FORM
assert result2["step_id"] == "password"
assert result2["errors"] is None
with patch_async_setup_entry() as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result2["flow_id"],
{CONF_PASSWORD: "abc123"},
)
await hass.async_block_till_done()
assert result3["type"] == FlowResultType.CREATE_ENTRY
assert result3["title"] == "Bot 923B"
assert result3["data"] == {
CONF_ADDRESS: "798A8547-2A3D-C609-55FF-73FA824B923B",
CONF_PASSWORD: "abc123",
CONF_SENSOR_TYPE: "bot",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_single_bot_with_password(hass: HomeAssistant) -> None:
"""Test the user initiated form for a bot with a password."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOHAND_ENCRYPTED_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "password"
assert result["errors"] is None
with patch_async_setup_entry() as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "abc123"},
)
await hass.async_block_till_done()
assert result2["type"] == FlowResultType.CREATE_ENTRY
assert result2["title"] == "Bot 923B"
assert result2["data"] == {
CONF_ADDRESS: "798A8547-2A3D-C609-55FF-73FA824B923B",
CONF_PASSWORD: "abc123",
CONF_SENSOR_TYPE: "bot",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_wolock_key(hass: HomeAssistant) -> None:
"""Test the user initiated form for a lock."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOLOCK_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.MENU
assert result["step_id"] == "lock_choose_method"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"next_step_id": "lock_key"}
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_key"
assert result["errors"] == {}
with patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key",
return_value=False,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KEY_ID: "",
CONF_ENCRYPTION_KEY: "",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_key"
assert result["errors"] == {"base": "encryption_key_invalid"}
with patch_async_setup_entry() as mock_setup_entry, patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key",
return_value=True,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Lock EEFF"
assert result["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
CONF_SENSOR_TYPE: "lock",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_wolock_auth(hass: HomeAssistant) -> None:
"""Test the user initiated form for a lock."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOLOCK_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.MENU
assert result["step_id"] == "lock_choose_method"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"next_step_id": "lock_auth"}
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_auth"
assert result["errors"] == {}
with patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.retrieve_encryption_key",
side_effect=SwitchbotAuthenticationError("error from api"),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_USERNAME: "",
CONF_PASSWORD: "",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_auth"
assert result["errors"] == {"base": "auth_failed"}
assert "error from api" in result["description_placeholders"]["error_detail"]
with patch_async_setup_entry() as mock_setup_entry, patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key",
return_value=True,
), patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.retrieve_encryption_key",
return_value={
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
},
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Lock EEFF"
assert result["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
CONF_SENSOR_TYPE: "lock",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_wolock_auth_switchbot_api_down(hass: HomeAssistant) -> None:
"""Test the user initiated form for a lock when the switchbot api is down."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOLOCK_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.MENU
assert result["step_id"] == "lock_choose_method"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"next_step_id": "lock_auth"}
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_auth"
assert result["errors"] == {}
with patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.retrieve_encryption_key",
side_effect=SwitchbotAccountConnectionError,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_USERNAME: "",
CONF_PASSWORD: "",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "cannot_connect"
async def test_user_setup_wolock_or_bot(hass: HomeAssistant) -> None:
"""Test the user initiated form for a lock."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[
WOLOCK_SERVICE_INFO,
WOHAND_SERVICE_ALT_ADDRESS_INFO,
],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "user"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
USER_INPUT,
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.MENU
assert result["step_id"] == "lock_choose_method"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input={"next_step_id": "lock_key"}
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "lock_key"
assert result["errors"] == {}
with patch_async_setup_entry() as mock_setup_entry, patch(
"homeassistant.components.switchbot.config_flow.SwitchbotLock.verify_encryption_key",
return_value=True,
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Lock EEFF"
assert result["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_KEY_ID: "ff",
CONF_ENCRYPTION_KEY: "ffffffffffffffffffffffffffffffff",
CONF_SENSOR_TYPE: "lock",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_setup_wosensor(hass: HomeAssistant) -> None:
"""Test the user initiated form with password and valid mac."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOSENSORTH_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "confirm"
assert result["errors"] is None
with patch_async_setup_entry() as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["title"] == "Meter EEFF"
assert result["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_SENSOR_TYPE: "hygrometer",
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_user_no_devices(hass: HomeAssistant) -> None:
"""Test the user initiated form with password and valid mac."""
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[],
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "no_devices_found"
async def test_async_step_user_takes_precedence_over_discovery(
hass: HomeAssistant,
) -> None:
"""Test manual setup takes precedence over discovery."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_BLUETOOTH},
data=WOCURTAIN_SERVICE_INFO,
)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "confirm"
with patch(
"homeassistant.components.switchbot.config_flow.async_discovered_service_info",
return_value=[WOCURTAIN_SERVICE_INFO],
):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
)
assert result["type"] == FlowResultType.FORM
with patch_async_setup_entry() as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
assert result2["type"] == FlowResultType.CREATE_ENTRY
assert result2["title"] == "Curtain EEFF"
assert result2["data"] == {
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_SENSOR_TYPE: "curtain",
}
assert len(mock_setup_entry.mock_calls) == 1
# Verify the original one was aborted
assert not hass.config_entries.flow.async_progress(DOMAIN)
async def test_options_flow(hass: HomeAssistant) -> None:
"""Test updating options."""
entry = MockConfigEntry(
domain=DOMAIN,
data={
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
CONF_NAME: "test-name",
CONF_PASSWORD: "test-password",
CONF_SENSOR_TYPE: "bot",
},
options={
CONF_RETRY_COUNT: 10,
},
unique_id="aabbccddeeff",
)
entry.add_to_hass(hass)
with patch_async_setup_entry() as mock_setup_entry:
entry = await init_integration(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "init"
assert result["errors"] is None
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_RETRY_COUNT: 3,
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["data"][CONF_RETRY_COUNT] == 3
assert len(mock_setup_entry.mock_calls) == 2
# Test changing of entry options.
with patch_async_setup_entry() as mock_setup_entry:
entry = await init_integration(hass)
result = await hass.config_entries.options.async_init(entry.entry_id)
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "init"
assert result["errors"] is None
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_RETRY_COUNT: 6,
},
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.CREATE_ENTRY
assert result["data"][CONF_RETRY_COUNT] == 6
assert len(mock_setup_entry.mock_calls) == 1
assert entry.options[CONF_RETRY_COUNT] == 6
| [
"noreply@github.com"
] | home-assistant.noreply@github.com |
653ae4b5122947a39abcf9bcadca408111886667 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Games/Pygame Tutorials/examples/steering/part06.py | bd8e17f8ca75e30f7b3554d7d92e86b3eb1b77c9 | [] | no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:5048d8b9515d53a57667319a2cf5946943860bce3c9fe681264f90e123930e47
size 6107
| [
"nateweiler84@gmail.com"
] | nateweiler84@gmail.com |
e4fa4d9dab472d482bd68322f56afa2de8fa9ac5 | 55647a80c8b412af9df0ba3f50595cc2f29c25e6 | /res/scripts/common/Lib/plat-mac/lib-scriptpackages/CodeWarrior/CodeWarrior_suite.py | a72d29886c8e46caf2deb3ace99075bf1353d978 | [] | no_license | cnsuhao/WOT-0.9.17-CT | 0035eb6070fb4fab8d8ee9f8bbc676c10d511cfb | d1f932d8cabaf8aa21708622e87f83c8d24d6451 | refs/heads/master | 2021-06-08T18:11:07.039293 | 2016-11-19T19:12:37 | 2016-11-19T19:12:37 | null | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 21,270 | py | # 2016.11.19 20:00:57 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/Lib/plat-mac/lib-scriptpackages/CodeWarrior/CodeWarrior_suite.py
"""Suite CodeWarrior suite: Terms for scripting the CodeWarrior IDE
Level 0, version 0
Generated from /Volumes/Sap/Applications (Mac OS 9)/Metrowerks CodeWarrior 7.0/Metrowerks CodeWarrior/CodeWarrior IDE 4.2.5
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'CWIE'
class CodeWarrior_suite_Events:
_argmap_add = {'new': 'kocl',
'with_data': 'data',
'to_targets': 'TTGT',
'to_group': 'TGRP'}
def add(self, _object, _attributes = {}, **_arguments):
"""add: add elements to a project or target
Required argument: an AE object reference
Keyword argument new: the class of the new element or elements to add
Keyword argument with_data: the initial data for the element or elements
Keyword argument to_targets: the targets to which the new element or elements will be added
Keyword argument to_group: the group to which the new element or elements will be added
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'ADDF'
aetools.keysubst(_arguments, self._argmap_add)
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
def build(self, _no_object = None, _attributes = {}, **_arguments):
"""build: build a project or target (equivalent of the Make menu command)
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'MAKE'
if _arguments:
raise TypeError, 'No optional args expected'
if _no_object is not None:
raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
else:
return
def check(self, _object = None, _attributes = {}, **_arguments):
"""check: check the syntax of a file in a project or target
Required argument: the file or files to be checked
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'CHEK'
if _arguments:
raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
def compile_file(self, _object = None, _attributes = {}, **_arguments):
"""compile file: compile a file in a project or target
Required argument: the file or files to be compiled
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'COMP'
if _arguments:
raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
def disassemble_file(self, _object = None, _attributes = {}, **_arguments):
"""disassemble file: disassemble a file in a project or target
Required argument: the file or files to be disassembled
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'DASM'
if _arguments:
raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
_argmap_export = {'in_': 'kfil'}
def export(self, _no_object = None, _attributes = {}, **_arguments):
"""export: Export the project file as an XML file
Keyword argument in_: the XML file in which to export the project
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'EXPT'
aetools.keysubst(_arguments, self._argmap_export)
if _no_object is not None:
raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
else:
return
def remove_object_code(self, _no_object = None, _attributes = {}, **_arguments):
"""remove object code: remove object code from a project or target
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'RMOB'
if _arguments:
raise TypeError, 'No optional args expected'
if _no_object is not None:
raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
else:
return
def remove_target_files(self, _object, _attributes = {}, **_arguments):
"""remove target files: remove files from a target
Required argument: an AE object reference
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'RMFL'
if _arguments:
raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
def run_target(self, _no_object = None, _attributes = {}, **_arguments):
"""run target: run a project or target
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'RUN '
if _arguments:
raise TypeError, 'No optional args expected'
if _no_object is not None:
raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
else:
return
def touch_file(self, _object = None, _attributes = {}, **_arguments):
"""touch file: touch a file in a project or target for compilation
Required argument: the file or files to be touched
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'TOCH'
if _arguments:
raise TypeError, 'No optional args expected'
_arguments['----'] = _object
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
def update(self, _no_object = None, _attributes = {}, **_arguments):
"""update: bring a project or target up to date
Keyword argument _attributes: AppleEvent attribute dictionary
"""
_code = 'CWIE'
_subcode = 'UP2D'
if _arguments:
raise TypeError, 'No optional args expected'
if _no_object is not None:
raise TypeError, 'No direct arg expected'
_reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes)
if _arguments.get('errn', 0):
raise aetools.Error, aetools.decodeerror(_arguments)
if _arguments.has_key('----'):
return _arguments['----']
else:
return
# --- CodeWarrior suite object classes (decompiled aetools glue) -------------
# Each aetools.ComponentItem subclass models one AppleScript class of the
# CodeWarrior suite: 'want' holds its 4-char Apple Event class code. The
# module-level aetools.NProperty subclasses describe properties ('which' is
# the property code, 'want' the expected value type code), and each
# plural-name assignment is the alias aetools uses for element access.
class single_class_browser(aetools.ComponentItem):
    """single class browser - a single class browser """
    want = '1BRW'
class _Prop_inherits(aetools.NProperty):
    """inherits - all properties and elements of the given class are inherited by this class. """
    which = 'c@#^'
    want = 'TXTD'
single_class_browsers = single_class_browser
class single_class_hierarchy(aetools.ComponentItem):
    """single class hierarchy - a single class hierarchy document """
    want = '1HIR'
single_class_hierarchies = single_class_hierarchy
class class_browser(aetools.ComponentItem):
    """class browser - a class browser """
    want = 'BROW'
class_browsers = class_browser
class file_compare_document(aetools.ComponentItem):
    """file compare document - a file compare document """
    want = 'COMP'
file_compare_documents = file_compare_document
class catalog_document(aetools.ComponentItem):
    """catalog document - a browser catalog document """
    want = 'CTLG'
catalog_documents = catalog_document
class editor_document(aetools.ComponentItem):
    """editor document - an editor document """
    want = 'EDIT'
editor_documents = editor_document
class class_hierarchy(aetools.ComponentItem):
    """class hierarchy - a class hierarchy document """
    want = 'HIER'
class_hierarchies = class_hierarchy
class project_inspector(aetools.ComponentItem):
    """project inspector - the project inspector """
    want = 'INSP'
project_inspectors = project_inspector
class message_document(aetools.ComponentItem):
    """message document - a message document """
    want = 'MSSG'
message_documents = message_document
class build_progress_document(aetools.ComponentItem):
    """build progress document - a build progress document """
    want = 'PRGS'
build_progress_documents = build_progress_document
class project_document(aetools.ComponentItem):
    """project document - a project document """
    want = 'PRJD'
class _Prop_current_target(aetools.NProperty):
    """current target - the current target """
    which = 'CURT'
    want = 'TRGT'
project_documents = project_document
class subtarget(aetools.ComponentItem):
    """subtarget - a target that is prerequisite for another target """
    want = 'SBTG'
class _Prop_link_against_output(aetools.NProperty):
    """link against output - is the output of this subtarget linked into its dependent target? """
    which = 'LNKO'
    want = 'bool'
class _Prop_target(aetools.NProperty):
    """target - the target that is dependent on this subtarget """
    which = 'TrgT'
    want = 'TRGT'
subtargets = subtarget
# target_file carries the largest property set of the suite (compile results,
# link order, shared-library flags, on-disk location).
class target_file(aetools.ComponentItem):
    """target file - a source or header file in a target """
    want = 'SRCF'
class _Prop_code_size(aetools.NProperty):
    """code size - the size of the code (in bytes) produced by compiling this source file """
    which = 'CSZE'
    want = 'long'
class _Prop_compiled_date(aetools.NProperty):
    """compiled date - the date and this source file was last compiled """
    which = 'CMPD'
    want = 'ldt '
class _Prop_data_size(aetools.NProperty):
    """data size - the size of the date (in bytes) produced by compiling this source file """
    which = 'DSZE'
    want = 'long'
class _Prop_debug(aetools.NProperty):
    """debug - is debugging information generated for this source file? """
    which = 'DBUG'
    want = 'bool'
class _Prop_dependents(aetools.NProperty):
    """dependents - the source files that need this source file in order to build """
    which = 'DPND'
    want = 'list'
class _Prop_id(aetools.NProperty):
    """id - the unique ID number of the target file """
    which = 'ID  '
    want = 'long'
class _Prop_init_before(aetools.NProperty):
    """init before - is the \xd4initialize before\xd5 flag set for this shared library? """
    which = 'INIT'
    want = 'bool'
class _Prop_link_index(aetools.NProperty):
    """link index - the index of the source file in its target\xd5s link order (-1 if source file is not in link order) """
    which = 'LIDX'
    want = 'long'
class _Prop_linked(aetools.NProperty):
    """linked - is the source file in the link order of its target? """
    which = 'LINK'
    want = 'bool'
class _Prop_location(aetools.NProperty):
    """location - the location of the target file on disk """
    which = 'FILE'
    want = 'fss '
class _Prop_merge_output(aetools.NProperty):
    """merge output - is this shared library merged into another code fragment? """
    which = 'MRGE'
    want = 'bool'
class _Prop_modified_date(aetools.NProperty):
    """modified date - the date and time this source file was last modified """
    which = 'MODD'
    want = 'ldt '
class _Prop_path(aetools.NProperty):
    """path - the path of the source file on disk """
    which = 'Path'
    want = 'itxt'
class _Prop_prerequisites(aetools.NProperty):
    """prerequisites - the source files needed to build this source file """
    which = 'PRER'
    want = 'list'
class _Prop_type(aetools.NProperty):
    """type - the type of source file """
    which = 'FTYP'
    want = 'FTYP'
class _Prop_weak_link(aetools.NProperty):
    """weak link - is this shared library linked weakly? """
    which = 'WEAK'
    want = 'bool'
target_files = target_file
class symbol_browser(aetools.ComponentItem):
    """symbol browser - a symbol browser """
    want = 'SYMB'
symbol_browsers = symbol_browser
class ToolServer_worksheet(aetools.ComponentItem):
    """ToolServer worksheet - a ToolServer worksheet """
    want = 'TOOL'
ToolServer_worksheets = ToolServer_worksheet
class target(aetools.ComponentItem):
    """target - a target in a project """
    want = 'TRGT'
class _Prop_name(aetools.NProperty):
    """name - """
    which = 'pnam'
    want = 'itxt'
class _Prop_project_document(aetools.NProperty):
    """project document - the project document that contains this target """
    which = 'PrjD'
    want = 'PRJD'
targets = target
class text_document(aetools.ComponentItem):
    """text document - a document that contains text """
    want = 'TXTD'
class _Prop_modified(aetools.NProperty):
    """modified - Has the document been modified since the last save? """
    which = 'imod'
    want = 'bool'
class _Prop_selection(aetools.NProperty):
    """selection - the selection visible to the user """
    which = 'sele'
    want = 'csel'
text_documents = text_document
# --- Runtime wiring -----------------------------------------------------
# Attach, for every class above, its superclass list and the private
# property/element lookup tables that aetools consults when packing and
# unpacking Apple Events.
single_class_browser._superclassnames = ['text_document']
single_class_browser._privpropdict = {'inherits': _Prop_inherits}
single_class_browser._privelemdict = {}
# Imported here (not at the top) because only the element tables below need
# the Standard Suite classes (character, insertion_point, line, text).
import Standard_Suite
single_class_hierarchy._superclassnames = ['document']
single_class_hierarchy._privpropdict = {'inherits': _Prop_inherits}
single_class_hierarchy._privelemdict = {}
class_browser._superclassnames = ['text_document']
class_browser._privpropdict = {'inherits': _Prop_inherits}
class_browser._privelemdict = {}
file_compare_document._superclassnames = ['text_document']
file_compare_document._privpropdict = {'inherits': _Prop_inherits}
file_compare_document._privelemdict = {}
catalog_document._superclassnames = ['text_document']
catalog_document._privpropdict = {'inherits': _Prop_inherits}
catalog_document._privelemdict = {}
editor_document._superclassnames = ['text_document']
editor_document._privpropdict = {'inherits': _Prop_inherits}
editor_document._privelemdict = {}
class_hierarchy._superclassnames = ['document']
class_hierarchy._privpropdict = {'inherits': _Prop_inherits}
class_hierarchy._privelemdict = {}
project_inspector._superclassnames = ['document']
project_inspector._privpropdict = {'inherits': _Prop_inherits}
project_inspector._privelemdict = {}
message_document._superclassnames = ['text_document']
message_document._privpropdict = {'inherits': _Prop_inherits}
message_document._privelemdict = {}
build_progress_document._superclassnames = ['document']
build_progress_document._privpropdict = {'inherits': _Prop_inherits}
build_progress_document._privelemdict = {}
project_document._superclassnames = ['document']
project_document._privpropdict = {'current_target': _Prop_current_target,
    'inherits': _Prop_inherits}
project_document._privelemdict = {'target': target}
subtarget._superclassnames = ['target']
subtarget._privpropdict = {'inherits': _Prop_inherits,
    'link_against_output': _Prop_link_against_output,
    'target': _Prop_target}
subtarget._privelemdict = {}
target_file._superclassnames = []
target_file._privpropdict = {'code_size': _Prop_code_size,
    'compiled_date': _Prop_compiled_date,
    'data_size': _Prop_data_size,
    'debug': _Prop_debug,
    'dependents': _Prop_dependents,
    'id': _Prop_id,
    'init_before': _Prop_init_before,
    'link_index': _Prop_link_index,
    'linked': _Prop_linked,
    'location': _Prop_location,
    'merge_output': _Prop_merge_output,
    'modified_date': _Prop_modified_date,
    'path': _Prop_path,
    'prerequisites': _Prop_prerequisites,
    'type': _Prop_type,
    'weak_link': _Prop_weak_link}
target_file._privelemdict = {}
symbol_browser._superclassnames = ['text_document']
symbol_browser._privpropdict = {'inherits': _Prop_inherits}
symbol_browser._privelemdict = {}
ToolServer_worksheet._superclassnames = ['text_document']
ToolServer_worksheet._privpropdict = {'inherits': _Prop_inherits}
ToolServer_worksheet._privelemdict = {}
target._superclassnames = []
target._privpropdict = {'name': _Prop_name,
    'project_document': _Prop_project_document}
target._privelemdict = {'subtarget': subtarget,
    'target_file': target_file}
text_document._superclassnames = ['document']
text_document._privpropdict = {'inherits': _Prop_inherits,
    'modified': _Prop_modified,
    'selection': _Prop_selection}
text_document._privelemdict = {'character': Standard_Suite.character,
    'insertion_point': Standard_Suite.insertion_point,
    'line': Standard_Suite.line,
    'text': Standard_Suite.text}
_Enum_DKND = {'project': 'PRJD',
'editor_document': 'EDIT',
'message': 'MSSG',
'file_compare': 'COMP',
'catalog_document': 'CTLG',
'class_browser': 'BROW',
'single_class_browser': '1BRW',
'symbol_browser': 'SYMB',
'class_hierarchy': 'HIER',
'single_class_hierarchy': '1HIR',
'project_inspector': 'INSP',
'ToolServer_worksheet': 'TOOL',
'build_progress_document': 'PRGS'}
_Enum_FTYP = {'library_file': 'LIBF',
'project_file': 'PRJF',
'resource_file': 'RESF',
'text_file': 'TXTF',
'unknown_file': 'UNKN'}
_Enum_Inte = {'never_interact': 'eNvr',
'interact_with_self': 'eInS',
'interact_with_local': 'eInL',
'interact_with_all': 'eInA'}
_Enum_PERM = {'read_write': 'RdWr',
'read_only': 'Read',
'checked_out_read_write': 'CkRW',
'checked_out_read_only': 'CkRO',
'checked_out_read_modify': 'CkRM',
'locked': 'Lock',
'none': 'LNNO'}
_classdeclarations = {'1BRW': single_class_browser,
'1HIR': single_class_hierarchy,
'BROW': class_browser,
'COMP': file_compare_document,
'CTLG': catalog_document,
'EDIT': editor_document,
'HIER': class_hierarchy,
'INSP': project_inspector,
'MSSG': message_document,
'PRGS': build_progress_document,
'PRJD': project_document,
'SBTG': subtarget,
'SRCF': target_file,
'SYMB': symbol_browser,
'TOOL': ToolServer_worksheet,
'TRGT': target,
'TXTD': text_document}
_propdeclarations = {'CMPD': _Prop_compiled_date,
'CSZE': _Prop_code_size,
'CURT': _Prop_current_target,
'DBUG': _Prop_debug,
'DPND': _Prop_dependents,
'DSZE': _Prop_data_size,
'FILE': _Prop_location,
'FTYP': _Prop_type,
'ID ': _Prop_id,
'INIT': _Prop_init_before,
'LIDX': _Prop_link_index,
'LINK': _Prop_linked,
'LNKO': _Prop_link_against_output,
'MODD': _Prop_modified_date,
'MRGE': _Prop_merge_output,
'PRER': _Prop_prerequisites,
'Path': _Prop_path,
'PrjD': _Prop_project_document,
'TrgT': _Prop_target,
'WEAK': _Prop_weak_link,
'c@#^': _Prop_inherits,
'imod': _Prop_modified,
'pnam': _Prop_name,
'sele': _Prop_selection}
_compdeclarations = {}
_enumdeclarations = {'DKND': _Enum_DKND,
'FTYP': _Enum_FTYP,
'Inte': _Enum_Inte,
'PERM': _Enum_PERM}
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\common\Lib\plat-mac\lib-scriptpackages\CodeWarrior\CodeWarrior_suite.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.11.19 20:00:57 Střední Evropa (běžný čas)
| [
"info@webium.sk"
] | info@webium.sk |
c2d538bc309a95e5b9656e96ad3030ac4e03d3bc | fb881b00c607fbe9a2f29005965e2648958c608c | /dynamodb-boto3-backup-table.py | 718c86ee279af7fa6ad558854cb87445a1c65036 | [] | no_license | renauddahou/boto3_dynamodb_scripts | b7afd604893c3bc9f6f5b774ef4d88e356ae8d0d | 97a380b09a4488b5a8483547c70879711630964a | refs/heads/main | 2023-07-12T11:20:38.335497 | 2021-09-02T20:10:03 | 2021-09-02T20:10:03 | 416,806,791 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | import boto3
# Request an on-demand backup of the "Employees" DynamoDB table; the
# service response is printed by the line that follows this block.
dynamodb = boto3.client('dynamodb')
response = dynamodb.create_backup(
    TableName='Employees', BackupName='Employees-Backup-01')
print(response) | [
"noreply@github.com"
] | renauddahou.noreply@github.com |
a48f5024c3a4275d8ac1d87fa467549a2db35489 | 7163a9eebedc129b317e27c6b9affbf49999645b | /triWeb_backend/yanghao_admin/triWeb/migrations/0001_initial.py | 5845252ded9e1c77bb181ecb7f533b3b9503c825 | [] | no_license | forging2012/web_TriAquae | 359fee73eecc4645f541994434f9e1cfd7db999c | 08359b75f26d144ef8efc1d6f63f06cf9026336b | refs/heads/master | 2021-01-15T19:35:37.838599 | 2013-08-24T00:41:19 | 2013-08-24T00:41:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,341 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South-generated initial schema migration for the triWeb app.

    Auto-generated: creates the Idc, Group, IP, RemoteUser, TriaquaeUser and
    AuthByIpAndRemoteUser tables plus their many-to-many join tables; the
    'models' dict is South's frozen ORM snapshot at generation time.
    """
    def forwards(self, orm):
        """Apply the migration: create all triWeb tables and constraints."""
        # Adding model 'Idc'
        db.create_table(u'triWeb_idc', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
        ))
        db.send_create_signal(u'triWeb', ['Idc'])
        # Adding model 'Group'
        db.create_table(u'triWeb_group', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
        ))
        db.send_create_signal(u'triWeb', ['Group'])
        # Adding model 'IP'
        db.create_table(u'triWeb_ip', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('hostname', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
            ('ip', self.gf('django.db.models.fields.IPAddressField')(unique=True, max_length=15)),
            ('idc', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['triWeb.Idc'], null=True, blank=True)),
            ('protocol', self.gf('django.db.models.fields.CharField')(default='ssh', max_length=20)),
            ('port', self.gf('django.db.models.fields.IntegerField')(default='22')),
            ('os', self.gf('django.db.models.fields.CharField')(default='linux', max_length=20)),
            ('snmpon', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('snmpversion', self.gf('django.db.models.fields.IntegerField')(default=3)),
            ('snmpcommunityname', self.gf('django.db.models.fields.CharField')(default='public', max_length=20)),
            ('snmpsecuritylevel', self.gf('django.db.models.fields.CharField')(default='auth', max_length=20)),
            ('snmpauthprotocol', self.gf('django.db.models.fields.CharField')(default='md5', max_length=20)),
            ('snmpuser', self.gf('django.db.models.fields.CharField')(default='tritest', max_length=20)),
            ('snmppassword', self.gf('django.db.models.fields.CharField')(default='123456', max_length=20)),
        ))
        db.send_create_signal(u'triWeb', ['IP'])
        # Adding M2M table for field group on 'IP'
        db.create_table(u'triWeb_ip_group', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('ip', models.ForeignKey(orm[u'triWeb.ip'], null=False)),
            ('group', models.ForeignKey(orm[u'triWeb.group'], null=False))
        ))
        db.create_unique(u'triWeb_ip_group', ['ip_id', 'group_id'])
        # Adding model 'RemoteUser'
        db.create_table(u'triWeb_remoteuser', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
        ))
        db.send_create_signal(u'triWeb', ['RemoteUser'])
        # Adding model 'TriaquaeUser'
        db.create_table(u'triWeb_triaquaeuser', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
        ))
        db.send_create_signal(u'triWeb', ['TriaquaeUser'])
        # Adding M2M table for field remoteuser on 'TriaquaeUser'
        db.create_table(u'triWeb_triaquaeuser_remoteuser', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('triaquaeuser', models.ForeignKey(orm[u'triWeb.triaquaeuser'], null=False)),
            ('remoteuser', models.ForeignKey(orm[u'triWeb.remoteuser'], null=False))
        ))
        db.create_unique(u'triWeb_triaquaeuser_remoteuser', ['triaquaeuser_id', 'remoteuser_id'])
        # Adding M2M table for field ip on 'TriaquaeUser'
        db.create_table(u'triWeb_triaquaeuser_ip', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('triaquaeuser', models.ForeignKey(orm[u'triWeb.triaquaeuser'], null=False)),
            ('ip', models.ForeignKey(orm[u'triWeb.ip'], null=False))
        ))
        db.create_unique(u'triWeb_triaquaeuser_ip', ['triaquaeuser_id', 'ip_id'])
        # Adding M2M table for field group on 'TriaquaeUser'
        db.create_table(u'triWeb_triaquaeuser_group', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('triaquaeuser', models.ForeignKey(orm[u'triWeb.triaquaeuser'], null=False)),
            ('group', models.ForeignKey(orm[u'triWeb.group'], null=False))
        ))
        db.create_unique(u'triWeb_triaquaeuser_group', ['triaquaeuser_id', 'group_id'])
        # Adding model 'AuthByIpAndRemoteUser'
        db.create_table(u'triWeb_authbyipandremoteuser', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('password', self.gf('django.db.models.fields.CharField')(max_length=1024)),
            ('authtype', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('ip', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['triWeb.IP'], null=True, blank=True)),
            ('remoteUser', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['triWeb.RemoteUser'], null=True, blank=True)),
        ))
        db.send_create_signal(u'triWeb', ['AuthByIpAndRemoteUser'])
        # Adding unique constraint on 'AuthByIpAndRemoteUser', fields ['ip', 'remoteUser']
        db.create_unique(u'triWeb_authbyipandremoteuser', ['ip_id', 'remoteUser_id'])
    def backwards(self, orm):
        """Revert the migration: drop everything forwards() created (in
        reverse dependency order, constraints first)."""
        # Removing unique constraint on 'AuthByIpAndRemoteUser', fields ['ip', 'remoteUser']
        db.delete_unique(u'triWeb_authbyipandremoteuser', ['ip_id', 'remoteUser_id'])
        # Deleting model 'Idc'
        db.delete_table(u'triWeb_idc')
        # Deleting model 'Group'
        db.delete_table(u'triWeb_group')
        # Deleting model 'IP'
        db.delete_table(u'triWeb_ip')
        # Removing M2M table for field group on 'IP'
        db.delete_table('triWeb_ip_group')
        # Deleting model 'RemoteUser'
        db.delete_table(u'triWeb_remoteuser')
        # Deleting model 'TriaquaeUser'
        db.delete_table(u'triWeb_triaquaeuser')
        # Removing M2M table for field remoteuser on 'TriaquaeUser'
        db.delete_table('triWeb_triaquaeuser_remoteuser')
        # Removing M2M table for field ip on 'TriaquaeUser'
        db.delete_table('triWeb_triaquaeuser_ip')
        # Removing M2M table for field group on 'TriaquaeUser'
        db.delete_table('triWeb_triaquaeuser_group')
        # Deleting model 'AuthByIpAndRemoteUser'
        db.delete_table(u'triWeb_authbyipandremoteuser')
    # Frozen ORM model definitions (South-generated; do not edit by hand).
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'triWeb.authbyipandremoteuser': {
            'Meta': {'unique_together': "(('ip', 'remoteUser'),)", 'object_name': 'AuthByIpAndRemoteUser'},
            'authtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['triWeb.IP']", 'null': 'True', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'remoteUser': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['triWeb.RemoteUser']", 'null': 'True', 'blank': 'True'})
        },
        u'triWeb.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
        },
        u'triWeb.idc': {
            'Meta': {'object_name': 'Idc'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
        },
        u'triWeb.ip': {
            'Meta': {'object_name': 'IP'},
            'group': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['triWeb.Group']", 'null': 'True', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'idc': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['triWeb.Idc']", 'null': 'True', 'blank': 'True'}),
            'ip': ('django.db.models.fields.IPAddressField', [], {'unique': 'True', 'max_length': '15'}),
            'os': ('django.db.models.fields.CharField', [], {'default': "'linux'", 'max_length': '20'}),
            'port': ('django.db.models.fields.IntegerField', [], {'default': "'22'"}),
            'protocol': ('django.db.models.fields.CharField', [], {'default': "'ssh'", 'max_length': '20'}),
            'snmpauthprotocol': ('django.db.models.fields.CharField', [], {'default': "'md5'", 'max_length': '20'}),
            'snmpcommunityname': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'}),
            'snmpon': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'snmppassword': ('django.db.models.fields.CharField', [], {'default': "'123456'", 'max_length': '20'}),
            'snmpsecuritylevel': ('django.db.models.fields.CharField', [], {'default': "'auth'", 'max_length': '20'}),
            'snmpuser': ('django.db.models.fields.CharField', [], {'default': "'tritest'", 'max_length': '20'}),
            'snmpversion': ('django.db.models.fields.IntegerField', [], {'default': '3'})
        },
        u'triWeb.remoteuser': {
            'Meta': {'object_name': 'RemoteUser'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'triWeb.triaquaeuser': {
            'Meta': {'object_name': 'TriaquaeUser'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'group': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['triWeb.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['triWeb.IP']", 'null': 'True', 'blank': 'True'}),
            'remoteuser': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['triWeb.RemoteUser']", 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
        }
    }
    complete_apps = ['triWeb']
"lijie3721@126.com"
] | lijie3721@126.com |
f77602c3aaee0f0f8dbd3801a07287105c036d8f | 796613525c40a241b0f88ceb761838a5bca311e1 | /trigger/mkTriggerUncertainties.py | 6bee680a3c641de54b3406a1e2416cab5bc7c16d | [] | no_license | UAEDF/vbfHbb | 377e956a2d002eacd2090a4abbaa6bffb141454e | ecd5bfefa3db8d2c8283e306d68da42de44f7e39 | refs/heads/master | 2020-04-22T16:54:48.622168 | 2015-12-26T16:07:44 | 2015-12-26T16:07:44 | 12,751,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,349 | py | #!/usr/bin/env python
import sys,os,json,re
basepath=os.path.split(os.path.abspath(__file__))[0]
sys.path.append(basepath+'/../common/')
tempargv = sys.argv[:]
sys.argv = []
import ROOT
from ROOT import *
sys.argv = tempargv
import main
from math import *
from optparse import OptionParser,OptionGroup
from copy import deepcopy as dc
from toolkit import *
from dependencyFactory import *
from write_cuts import *
from array import array
global paves
paves = []
####################################################################################################
def axisupdate(h):
ax = h.GetXaxis()
ax.SetTitleFont(42)
ax.SetTitleSize(0.05)
ax.SetTitleOffset(1.00)
ax.SetLabelFont(42)
ax.SetLabelSize(0.05)
ax.SetLabelOffset(0.005)
#
ax = h.GetYaxis()
ax.SetTitleFont(42)
ax.SetTitleSize(0.05)
ax.SetTitleOffset(1.2)
ax.SetLabelFont(42)
ax.SetLabelSize(0.04)
ax.SetLabelOffset(0.005)
#
return h
####################################################################################################
def putLine(x1,y1,x2,y2,style=kDashed,color=kGray+2,width=1):
global paves
l = TLine(x1,y1,x2,y2)
l.SetLineStyle(style)
l.SetLineColor(color)
l.SetLineWidth(width)
l.Draw()
paves += [l]
####################################################################################################
def putPave(text,x1,y1,align=12,font=42,size=0.045,color=kBlack,ndc=1):
global paves
l = TLatex(x1,y1,text)
l.SetNDC(1)
l.SetTextFont(font)
l.SetTextSize(size)
l.SetTextColor(color)
l.SetTextAlign(align)
l.Draw()
paves += [l]
# OPTION PARSER ####################################################################################
def parser(mp=None):
if mp==None: mp = OptionParser()
mgm = OptionGroup(mp,cyan+"Main options"+plain)
mgm.add_option('-o','--outputfile',help=blue+"Name of output file."+plain,dest='outputfile',default="%s/../trigger/rootfiles/vbfHbb.root"%basepath)
mgtc = OptionGroup(mp,cyan+"Trigger uncertainty settings"+plain)
# mgtc.add_option('--draw',help='Draw histograms from root file (fill if not present).',action='store_true',default=False)
# mgtc.add_option('--redraw',help='Draw histogram from root file (refill in all cases).',action='store_true',default=False)
mgtc.add_option('--ftwodmaps',help=blue+'Filename for twodmaps.'+plain,dest='ftwodmaps',default="",type="str")
mgtc.add_option('--fdistmaps',help=blue+'Filename for distmaps.'+plain,dest='fdistmaps',default="",type="str")
mgtc.add_option('-s','--samples',help=blue+'List of samples (distmap).'+plain,dest='samples',type="str",default=[],action='callback',callback=optsplit)
mgtc.add_option('-c','--categories',help=blue+'Pick for categories.'+plain,dest='categories',type="str",default=[],action='callback',callback=optsplit)
mgtc.add_option('-b','--categoryboundaries',help=blue+'Boundaries for categories.'+plain,dest='categoryboundaries',type="str",default=[-1.0,-0.6,0.0,0.70,0.84,1.001],action='callback',callback=optsplit) #],[-1.0,-0.1,0.40,0.80,1.001]
mgtc.add_option('--notext',help='No right margin legends.',default=False,action='store_true')
mp.add_option_group(mgm)
mp.add_option_group(mgtc)
return mp
####################################################################################################
def getTwoDMaps(opts,fout):
fin = TFile(opts.ftwodmaps,"read")
fout.cd()
gDirectory.cd("%s:/"%fout.GetName())
makeDirsRoot(fout,"2DMaps/")
fin.cd()
m = {}
for i in ['JetMon','QCD','JetMon-QCD']:
gDirectory.cd("%s:/2DMaps/%s/"%(fin.GetName(),i))
for j in gDirectory.GetListOfKeys():
if '%s-Rat'%i in j.GetName():
m[i] = fin.Get("2DMaps/%s/%s"%(i,j.GetName()))
m[i].SetName("ScaleFactorMap_%s"%i)
gDirectory.cd("%s:/2DMaps/"%(fout.GetName()))
m[i].Write(m[i].GetName(),TH1.kOverwrite)
gDirectory.cd("%s:/2DMaps/%s/"%(fin.GetName(),i))
fout.Write()
fin.Close()
##################################################
def getDistMaps(opts,fout):
fin = TFile(opts.fdistmaps,"read")
fout.cd()
gDirectory.cd("%s:/"%fout.GetName())
makeDirsRoot(fout,"DistMaps/")
fin.cd()
m = {}
info = None
for i in opts.samples:
gDirectory.cd("%s:/2DMaps/%s/"%(fin.GetName(),i))
for j in gDirectory.GetListOfKeys():
if info==None: info = [x.split("-") for x in re.search("_s(.*)-t(.*)-r(.*)-d(.*)_(.*)",j.GetName()).groups()]
if '%s-Num'%i in j.GetName():
try: cat = re.search('(mvaVBFC|mvaNOMC|CAT)([0-9]{1})',j.GetName()).group(2)
except: cat = "N"
if opts.categories and not cat in opts.categories: continue
m[i] = fin.Get("2DMaps/%s/%s"%(i,j.GetName()))
#print m[i].Integral()
if not m[i].Integral() == 0: m[i].Scale(1./m[i].Integral())
m[i].SetName("DistributionMap_%s_C%s"%(i,cat))
gDirectory.cd("%s:/DistMaps/"%(fout.GetName()))
m[i].Write(m[i].GetName(),TH1.kOverwrite)
gDirectory.cd("%s:/2DMaps/%s/"%(fin.GetName(),i))
for i in info[0]:
if 'mva' in i: del info[0][info[0].index(i)]
fout.Write()
fin.Close()
print
return info
##################################################
def getConvolutions(opts,fout,info):
print Blue+"Determining scale factors + errors: %s (%s)\n"%('-'.join(info[3]),','.join(info[4]))+plain
fout.cd()
gDirectory.cd("%s:/"%fout.GetName())
makeDirsRoot(fout,"Convolutions/")
makeDirsRoot(fout,"ScaleFactors/")
sfmaps = {}
for i in ['JetMon','QCD']:#,'JetMon-QCD']:
sfmaps[i] = fout.Get("2DMaps/ScaleFactorMap_%s"%i)
eff = {}
efferr = {}
gDirectory.cd("%s:/Convolutions/"%(fout.GetName()))
for i in opts.samples:
for c in ["N"]+["%d"%x for x in range(len(opts.categoryboundaries)-1)]:
if opts.categories and not c in opts.categories: continue
distmap = fout.Get("DistMaps/DistributionMap_%s_C%s"%(i,c))
if not distmap: continue
for j in ['JetMon','QCD']:#,'JetMon-QCD']:
convolution = distmap.Clone("Convolution_%s_%s_C%s"%(i,j,c))
#convolution.Scale(1./convolution.Integral())
convolution.Multiply(sfmaps[j])
convolution.Write(convolution.GetName(),TH1.kOverwrite)
integral = ROOT.Double(0.0)
error = ROOT.Double(0.0)
integral = convolution.IntegralAndError(0,convolution.GetNbinsX(),0,convolution.GetNbinsY(),error)
eff[(i,j,c)] = integral
efferr[(i,j,c)] = error
print "%12s |"%"E",
for c in ["N"]+["%d"%x for x in range(len(opts.categoryboundaries)-1)]:
for j in ['JetMon','QCD']:
print "%14s |"%("%s CAT%s"%(j,c)),
print
print "-"*(14 + 17*2*(len(opts.categoryboundaries)))
for i in opts.samples:
print "%12s |"%i,
for c in ["N"]+["%d"%x for x in range(len(opts.categoryboundaries)-1)]:
for j in ['JetMon','QCD']:
print "%14s |"%("%.2f +- %.3f"%(eff[(i,j,c)],efferr[(i,j,c)])),
print
print
sf = {}
sferr = {}
sfplots = {}
sfplots1d = {}
j = 4 if 'VBF' in '-'.join(info[3]) else 0
#sfplot_labels = ["CAT%d (%.2f,%.2f)"%(x,y,z) for (x,y,z) in [(i-1+j if not (i==0 and j==4) else -2,float(opts.categoryboundaries[i]),float(opts.categoryboundaries[i+1])) for i in range(len(opts.categoryboundaries)-1)]]+["ALL"] #["CAT0","CAT1","CAT2","CAT3","CAT4","ALL"]
sfplot_labels = ["CAT%d"%x for x in [i-1+j if not (i==0 and j==4) else -2 for i in range(len(opts.categoryboundaries)-1)]]+["ALL"] #["CAT0","CAT1","CAT2","CAT3","CAT4","ALL"]
print "%12s |"%"E(data/mc)",
for c in ["N"]+["%d"%x for x in range(len(opts.categoryboundaries)-1)]:
print "%17s |"%("CAT%s"%(c)),
print
print "-"*(14 + 20*(len(opts.categoryboundaries)))
gDirectory.cd("%s:/ScaleFactors/"%(fout.GetName()))
for i in opts.samples:
print "%12s |"%i,
# 2D
#binsx = array('f',opts.categoryboundaries+[1.2])
#binsy = array('f',[0,1])
nbins = len(opts.categoryboundaries)
sfplots[i] = TH2F("ScaleFactors_%s"%i,"ScaleFactors_%s"%i,nbins,0,nbins,1,0,1)#len(binsx)-1,binsx,len(binsy)-1,binsy)
sfplots1d[i] = TH1F("ScaleFactors1D_%s"%i,"ScaleFactors1D_%s"%i,nbins,0,nbins)#len(binsx)-1,binsx)
for j in range(sfplots[i].GetNbinsX()): sfplots[i].GetXaxis().SetBinLabel(j+1,sfplot_labels[j])
for j in range(sfplots[i].GetNbinsX()): sfplots1d[i].GetXaxis().SetBinLabel(j+1,sfplot_labels[j])
#sfplots[i].GetXaxis().SetTickLength(0)
for c in ["N"]+["%d"%x for x in range(nbins-1)]:
if opts.categories and not c in opts.categories: continue
if not (i,"JetMon",c) in eff.keys(): continue
d = eff[(i,"JetMon",c)]
q = eff[(i,"QCD",c)]
ed = efferr[(i,"JetMon",c)]
eq = efferr[(i,"QCD",c)]
#print d,q,ed,eq
if not q==0:
sf = d / q
sferr = sqrt( (ed*ed/q/q) + (d*d*eq*eq/q/q/q/q) )
else:
sf = 0
sferr = 0
print "%17s |"%("%.3f +- %.3f %%"%(sf,sferr)),
thisbin = int(c)+1 if not c=="N" else nbins
sfplots[i].SetBinContent(thisbin,1,sf)
sfplots[i].SetBinError(thisbin,1,sferr)
sfplots1d[i].SetBinContent(thisbin,sf)
sfplots1d[i].SetBinError(thisbin,sferr)
print
fout.cd()
gDirectory.cd("%s:/ScaleFactors/"%(fout.GetName()))
sfplots[i].Write(sfplots[i].GetName(),TH1.kOverwrite)
sfplots1d[i].Write(sfplots1d[i].GetName(),TH1.kOverwrite)
print
##################################################
def getCanvases(opts,fout,info):
    # Render the per-sample scale-factor histograms stored under
    # 'ScaleFactors/' in `fout` onto canvases, save each as PDF/PNG under
    # plots/<file>/TriggerUncertainty/, and finally print a LaTeX table of
    # relative scale-factor uncertainties per category.
    #
    # opts: parsed command line options (uses .notext, .outputfile, ...)
    # fout: open ROOT output file (its name selects "Set A"/"Set B" labels)
    # info: tuple from getDistMaps(); info[0]=selection lines, info[1]=triggers,
    #       info[4]=2D-map variable names -- NOTE(review): inferred from usage
    #       here and in printText/printSelleg, confirm against getDistMaps().
    fout.cd()
    gDirectory.cd("%s:/"%fout.GetName())
    makeDirsRoot(fout,"Canvases/")
    makeDirs("plots/%s/TriggerUncertainty/"%(os.path.split(fout.GetName()))[1][:-5])
    numbers = {}
    # Global ROOT style setup: batch mode (no GUI windows) and CMS TDR style.
    gROOT.SetBatch(1)
    gStyle.SetOptStat(0)
    gStyle.SetPaintTextFormat(".3f")
    gROOT.ProcessLineSync('.x ../common/styleCMSTDR.C')
    gStyle.SetPadTopMargin(0.07)
    gStyle.SetPadRightMargin(0.04)
    gStyle.SetPadBottomMargin(0.09)
    gStyle.SetPadLeftMargin(0.14)
    gDirectory.cd("%s:/ScaleFactors"%fout.GetName())
    c = TCanvas("c","",1800 if not opts.notext else 1600,1200)
    # One canvas pass per histogram key found in the ScaleFactors directory.
    for i in gDirectory.GetListOfKeys():
        gPad.SetRightMargin(0.25)
        text = printText(opts,1-0.1,1-0.15,i.GetName().split('_')[1],"Set A" if "NOM_" in fout.GetName() else ("Set B" if "VBF" in fout.GetName() else "???"),info[4])#,0.020,kBlue-2)
        h = fout.Get("ScaleFactors/%s"%(i.GetName()))
        h.SetTitle("")
        #h.GetXaxis().SetLabelSize(0.032)
        #h.GetXaxis().SetLabelSize(0.05)
        th2f = (h.IsA().GetName() == "TH2F")
        if th2f and 'ScaleFactor' in h.GetName():
            h.GetYaxis().SetNdivisions(0)
            h.GetYaxis().SetBinLabel(1,"")
            h.GetZaxis().SetRangeUser(0.70,0.90)
        if 'ScaleFactor' in h.GetName():
            h.GetYaxis().SetTitle("Trigger Scale Factor")
            h.GetYaxis().SetTitleOffset(0.5 if th2f else 1.0)
        c.SetName(h.GetName())
        c.cd()
        selleg = printSelleg(text.GetY1()-0.1,1-0.15,info[0],info[1])
        pave = None
        if th2f: h.Draw("colz,error,text")
        else:
            # 1D case: draw axis, grey band for the first (inclusive?) bin,
            # histogram line, error band with values, and per-bin error labels.
            gPad.SetRightMargin(0.20 if not opts.notext else 0.04)
            text.SetX1(text.GetX1()-0.04)
            selleg.SetX1(selleg.GetX1()-0.04)
            #h.GetXaxis().SetTitleSize(0.06)
            #h.GetYaxis().SetTitleSize(0.06)
            #h.GetXaxis().SetLabelSize(0.06)
            #h.GetYaxis().SetLabelSize(0.05)
            #h.GetXaxis().SetLabelOffset(h.GetXaxis().GetLabelOffset()*1.1)
            #h.GetYaxis().SetLabelOffset(h.GetYaxis().GetLabelOffset()*1.3)
            #h.GetYaxis().SetTitleOffset(h.GetYaxis().GetTitleOffset()/1.1)
            h = axisupdate(h)
            h.GetXaxis().SetDecimals(kTRUE)
            h.GetYaxis().SetDecimals(kTRUE)
            h.GetYaxis().SetRangeUser(0,1.2)#round(h.GetMaximum()*1.5,1))
            h.SetLineWidth(3)
            h.DrawCopy("axis")
            tb = TBox(h.GetBinLowEdge(1),0,h.GetBinLowEdge(2),1.2)
            tb.SetLineColor(kGray)
            tb.SetFillColor(kGray)
            tb.SetFillStyle(1001)
            tb.Draw("same")
            h.DrawCopy("hist,same")
            h.SetMarkerSize(1.5)
            h.SetFillColor(kBlue)
            h.SetFillStyle(3018)#3018)
            h.Draw("e2same,text70")
            gPad.Update()
            gPad.RedrawAxis()
            error=[None]*h.GetNbinsX()
            # NOTE(review): this inner `i` shadows the outer key-loop `i`;
            # harmless here because the key is no longer used below, but fragile.
            for i in range(1,h.GetNbinsX()+1):
                if not 'ALL' in h.GetXaxis().GetBinLabel(i) and float(re.search('([A-Z]*)([0-9+-]*)',h.GetXaxis().GetBinLabel(i)).group(2))<0:
                    ctr = h.GetXaxis().GetBinCenter(i)
                    wid = h.GetXaxis().GetBinWidth(i)
                    pave = TPave(ctr-wid/2.,gPad.GetUymin(),ctr+wid/2.,gPad.GetUymax())
                    pave.SetFillColor(kGray)
                    pave.SetFillStyle(1001)
                    #pave.Draw("same")
                if h.GetBinContent(i)==0: continue
                error[i-1] = extraText(i-h.GetBinWidth(i)/2.,h.GetBinContent(i),"#pm %.3f"%h.GetBinError(i))
                error[i-1].Draw("same")
                # print error[i-1].Print()
                # print "%10s %10s %12.3f / %12.3f = %12.6f"%(h.GetName(), h.GetXaxis().GetBinLabel(i), h.GetBinContent(i), h.GetBinError(i), h.GetBinError(i)/h.GetBinContent(i))
                # Collect relative uncertainty per (sample, category) for the
                # LaTeX table below; 'ALL' bins are excluded.
                if not 'ALL' in h.GetXaxis().GetBinLabel(i):
                    numbers[(h.GetName().split('_')[1],re.search("([A-Z]*)([0-9\-+]{1,2})",h.GetXaxis().GetBinLabel(i)).group(2))] = h.GetBinError(i)/h.GetBinContent(i)
            # print
            gPad.Update()
            line = TLine(h.GetNbinsX()-1,0.0,h.GetNbinsX()-1,gPad.GetUymax())
            line.SetLineWidth(4)
            line.Draw("same")
        #text.Draw("same")
        #if not opts.notext: selleg.Draw("same")
        samplenames = {"GF":"GF","Tall": "Top", "ZJets":"Z+jets","QCD":"QCD","VBF125":"VBF"}
        # Pull the sample name back out of the legend text to label the pad.
        for lline in text.GetListOfLines():
            if "sample" in lline.GetTitle():
                sample = re.search("sample: (.*)",lline.GetTitle()).group(1)
                if not sample in samplenames: continue
                putPave(samplenames[sample]+" sample",0.5,gStyle.GetPadBottomMargin()+0.08,align=22,font=62,size=0.036)
                print sample
        print
        pcms1 = TPaveText(gPad.GetLeftMargin(),1.-gPad.GetTopMargin(),0.3,1.,"NDC")
        pcms1.SetTextAlign(12)
        pcms1.SetTextFont(62)
        pcms1.SetTextSize(gPad.GetTopMargin()*2.5/4.0)
        pcms1.SetFillStyle(-1)
        pcms1.SetBorderSize(0)
        pcms1.AddText("CMS")
        #pcms1.Draw()
        pcms2 = TPaveText(0.6,1.-gPad.GetTopMargin(),1.-gPad.GetRightMargin()+0.015,1.,"NDC")
        pcms2.SetTextAlign(32)
        pcms2.SetTextFont(62)
        pcms2.SetTextSize(gPad.GetTopMargin()*2.5/4.0)
        pcms2.SetFillStyle(-1)
        pcms2.SetBorderSize(0)
        pcms2.AddText("%.1f fb^{-1} (8 TeV)"%(19.8 if 'NOM' in fout.GetName() else 18.3))
        #pcms2.Draw()
        gStyle.SetPaintTextFormat(".3g")
        # Header paves: selection label (left) and luminosity (right).
        putPave("Set %s selection"%("A" if 'NOM_' in fout.GetName() else "B"),gStyle.GetPadLeftMargin()+0.01,1.-0.5*gStyle.GetPadTopMargin(),align=12,font=62)
        putPave("%.1f fb^{-1} (8 TeV)"%(19.8 if 'NOM_' in fout.GetName() else 18.3),1.-gStyle.GetPadRightMargin()-0.01,1.-0.5*gStyle.GetPadTopMargin(),align=32,font=42)
        gPad.Update()
        gDirectory.cd("%s:/Canvases"%(fout.GetName()))
        c.Update()
        c.Write(c.GetName(),TH1.kOverwrite)
        gPad.RedrawAxis()
        c.SaveAs("plots/%s/TriggerUncertainty/%s%s.pdf"%(os.path.split(fout.GetName())[1][:-5],c.GetName(),'' if not opts.notext else '_noleg'))
        c.SaveAs("plots/%s/TriggerUncertainty/%s%s.png"%(os.path.split(fout.GetName())[1][:-5],c.GetName(),'' if not opts.notext else '_noleg'))
        c.Update()
    print purple+"Scale factor plots at: plots/%s/TriggerUncertainty/*ScaleFactor*.png"%(os.path.split(fout.GetName())[1][:-5])+plain
    # table
    # Emit a LaTeX table of the relative uncertainties collected in `numbers`,
    # columns ordered VBF125, GF, QCD, ZJets, Top (via the sort key tuples).
    labels = ['Category']+list(set([x for (x,y) in numbers.iterkeys() if not x in ['VBF115','VBF120','VBF130','VBF135','WJets']]))
    print "\\begin{table}[htbf]\n\t\\caption{Relative uncertainties for trigger scale factors per category.} \\small \\centering \n\t\\begin{tabular}{|*{%d}{c|}} \\hline"%(len(labels))
    print "\t",
    for il,l in enumerate(sorted(labels,key=lambda x:('Category' in x,'VBF125' in x,'GF' in x,'QCD' in x,'ZJets' in x,'Tall' in x),reverse=True)):
        print "%20s%s"%("\makebox[1.8cm]{%s}"%l.replace('Tall','Top').replace('GF','GF125')," &" if il<len(labels)-1 else ""),
    print "\\\\ \\hline \\hline"
    for ic,cat in enumerate(sorted([y for (x,y) in numbers.iterkeys() if x==labels[-1]])):
        print "%20s &"%cat,
        for il,l in enumerate(sorted(list(set(labels)-set(['Category'])),key=lambda x:('Category' in x, 'VBF125' in x,'GF' in x,'QCD' in x,'ZJets' in x,'Tall' in x),reverse=True)):
            print "%20.4f%s"%(numbers[(l,cat)]," &" if not l=="Tall" else ""),
        print "\\\\ \\hline",
        if not 'VBF' in fout.GetName() and ic==len([y for (x,y) in numbers.iterkeys() if x==labels[-1]])-1: print " \\hline"
        else: print ""
    print "\t\\end{tabular}\n\\end{table}"
    c.Close()
####################################################################################################
def extraText(hcenter,vcenter,line,fontSize=0.027,fontColor=kBlack):
    # Build a small borderless, transparent TPaveText holding `line`,
    # rotated by 70 degrees and offset slightly up/right of (hcenter, vcenter).
    # Returns the pave; the caller is responsible for drawing it.
    x_low = hcenter - 0.2 + 0.17
    x_high = hcenter + 0.2 + 0.17
    y_low = vcenter - 0.2 + 0.07
    y_high = vcenter + 0.2 + 0.07
    box = TPaveText(x_low, y_low, x_high, y_high)
    box.SetFillStyle(0)
    box.SetBorderSize(0)
    box.SetTextAlign(22)
    box.SetTextColor(fontColor)
    box.SetTextSize(fontSize)
    entry = box.AddText(line)
    entry.SetTextAngle(70)
    return box
def printText(opts,top,left,sample,selection,mapvars,fontSize=0.020,fontColor=kBlack):
    # Build the information pave (CMS label, luminosity, selection, sample and
    # 2D-map variables) drawn next to each canvas. With --notext only the
    # short 3-line variant is produced, repositioned and enlarged.
    # Returns the TPaveText; the caller decides whether/where to draw it.
    # Pretty-print names for known map variables; unknown ones pass through.
    varnames = {'jetBtag00':'bjet0 CSV','jetBtag10':'bjet1 CSV','mqq1':'m_{q#bar{q}}','mqq2':'m_{q#bar{q}}','dEtaqq1':'#Delta#eta_{q#bar{q}}','dEtaqq2':'#Delta#eta_{q#bar{q}}'}
    nlines = 6 if not opts.notext else 3
    if opts.notext:
        left = 0.33
        top = 0.30
        fontSize = fontSize*1.4
    right = left + 0.13
    ###bottom = top - nlines*(fontSize+0.018)
    bottom = top - nlines*(0.05)
    text = TPaveText(left,bottom,right,top,"NDC")
    text.SetFillColor(0)
    text.SetFillStyle(0)
    text.SetBorderSize(0)
    text.SetTextFont(62)
    ###text.SetTextSize(fontSize)
    text.SetTextSize(0.04)
    text.SetTextColor(fontColor)
    text.SetTextAlign(11)
    if not opts.notext:
        text.AddText("CMS preliminary")
        text.AddText("VBF H#rightarrow b#bar{b}")
        # Luminosity by selection set: 19.8/fb for Set A, 18.3/fb for Set B.
        text.AddText("%.1f fb^{-1}"%(19800./1000. if selection=="Set A" else 18300./1000. if selection=="Set B" else "???")) # NOM VBF
    text.AddText("%s selection"%selection.replace('VBF','Set B').replace('NOM','Set A'))
    text.AddText("sample: %s"%sample)
    text.AddText("2D map: %s"%(' & '.join([(varnames[x] if x in varnames else x) for x in mapvars])))
    if not opts.notext:
        thisline = text.AddText("#varepsilon = #frac{#varepsilon_{%s #times data}}{#varepsilon_{%s #times qcd}}"%(sample,sample))
        thisline.SetTextAlign(13)
    return text
def printSelleg(top,left,selection,trigger,fontSize=0.020,fontColor=kBlack):
    # Build the selection/trigger legend: one row per (sorted, stripped)
    # selection cut plus a trailing row listing the MC trigger paths.
    n_rows = 1
    for _ in selection:
        n_rows += 1
    legend = getSelLegend(left, top - n_rows * 0.030, 1 - 0.02, top, None, 0, 0, 1, 0.018)
    for row_idx, cut in enumerate(sorted(x.strip() for x in selection)):
        prefix = 'sel:' if row_idx == 0 else ' ' * 4
        legend.AddText('%s %s' % (prefix, cut))
    legend.AddText('trg: %s (MC)' % (','.join(trigger)))
    return legend
####################################################################################################
def mkTriggerUncertainties():
    # Top-level driver: parse options, open the output ROOT file and run the
    # full chain (2D maps -> distribution maps -> convolutions -> canvases).
    # NOTE(review): `paves` is declared global but not assigned here --
    # presumably populated elsewhere; confirm it is still needed.
    global paves
    ## init main (including option parsing)
    #opts,samples,variables,loadedSamples,fout,KFWghts = main.main(parser())
    mp = parser()
    opts,args = mp.parse_args()
    fout = TFile(opts.outputfile,"recreate")
    # Silence ROOT info-level chatter; warnings and above still print.
    gROOT.ProcessLine("gErrorIgnoreLevel = kWarning;")
    getTwoDMaps(opts,fout)
    info = getDistMaps(opts,fout)
    getConvolutions(opts,fout,info)
    getCanvases(opts,fout,info)
    fout.Close()
# Script entry point: build the trigger scale-factor uncertainty plots/tables.
if __name__=='__main__':
    mkTriggerUncertainties()
| [
"sara.alderweireldt@cern.ch"
] | sara.alderweireldt@cern.ch |
a15087598d7af3aeeed237ca9fae7764b31e834f | b39a57a3ae19283c08decaf2f37fa3451369cc94 | /sitetree/sitetreeapp.py | 8d0ffbb6d7df0e1d8f4a1a36c2010570451221d6 | [] | no_license | jy754550444/drugapp | 5008f75f32e573d21422ee3ccbbe17072be9d56b | ddf38244911887f00d93f7be6d850ec56276451e | refs/heads/master | 2019-03-21T07:22:07.674174 | 2018-04-09T01:29:38 | 2018-04-09T01:29:38 | 124,162,545 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 37,466 | py | from __future__ import unicode_literals
import warnings
from collections import defaultdict
from copy import deepcopy
from functools import partial
from threading import local
from django import VERSION
from django.conf import settings
from django.core.cache import cache
from django.db.models import signals
from django.template.base import (
FilterExpression, Lexer, Parser, Token, Variable, VariableDoesNotExist, TOKEN_BLOCK, UNKNOWN_SOURCE, TOKEN_TEXT,
TOKEN_VAR, VARIABLE_TAG_START)
from django.template.defaulttags import url as url_tag
from django.template.loader import get_template
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.http import urlquote
from django.utils.translation import get_language
from .exceptions import SiteTreeError
from .settings import (
ALIAS_TRUNK, ALIAS_THIS_CHILDREN, ALIAS_THIS_SIBLINGS, ALIAS_THIS_PARENT_SIBLINGS, ALIAS_THIS_ANCESTOR_CHILDREN,
UNRESOLVED_ITEM_MARKER, RAISE_ITEMS_ERRORS_ON_DEBUG, CACHE_TIMEOUT)
from .utils import get_tree_model, get_tree_item_model, import_app_sitetree_module, generate_id_for
if False: # pragma: nocover
from django.template import Context
from .models import TreeItemBase
if VERSION >= (1, 9, 0):
get_lexer = partial(Lexer)
else:
get_lexer = partial(Lexer, origin=UNKNOWN_SOURCE)
# Concrete tree / tree item model classes (swappable via project settings).
MODEL_TREE_CLASS = get_tree_model()
MODEL_TREE_ITEM_CLASS = get_tree_item_model()
_ITEMS_PROCESSOR = None
"""Stores tree items processor callable or None."""
_I18N_TREES = []
"""Stores aliases of trees supporting internationalization."""
_DYNAMIC_TREES = {}
"""Holds trees dynamically loaded from project apps."""
_IDX_ORPHAN_TREES = 'orphans'
"""Dictionary index in `_DYNAMIC_TREES` for orphaned trees list."""
_IDX_TPL = '%s|:|%s'
"""Name template used as dictionary index in `_DYNAMIC_TREES`."""
# Thread-local storage for the per-thread SiteTree singleton (see get_sitetree()).
_THREAD_LOCAL = local()
_THREAD_SITETREE = 'sitetree'
# Django 1.11+ requires Context.flatten() when rendering with a plain dict.
_CONTEXT_FLATTEN = VERSION >= (1, 11)
_UNSET = set() # Sentinel
def get_sitetree():
    """Returns the per-thread ``SiteTree`` utility object.

    A ``SiteTree`` instance is lazily created on first access in each
    thread and stashed in thread-local storage, so subsequent calls
    within the same thread reuse it.

    :rtype: SiteTree
    """
    sitetree = getattr(_THREAD_LOCAL, _THREAD_SITETREE, None)
    if sitetree is not None:
        return sitetree
    sitetree = SiteTree()
    setattr(_THREAD_LOCAL, _THREAD_SITETREE, sitetree)
    return sitetree
def register_items_hook(func):
    """Installs a callable that post-processes tree items just before templates see them.

    The callable must:

    a) accept ``tree_items`` and ``tree_sender`` keyword parameters, where
       ``tree_items`` is a list of extended TreeItem objects ready to pass
       to a template, and ``tree_sender`` is a navigation type identifier
       (e.g.: `menu`, `sitetree`, `breadcrumbs`, `menu.children`,
       `sitetree.children`);

    b) return the list of extended TreeItem objects to pass to the template.

    Example::

        # Put the following code somewhere where it'd be triggered as expected. E.g. in app view.py.

        # First import the register function.
        from sitetree.sitetreeapp import register_items_hook

        # The following function will be used as items processor.
        def my_items_processor(tree_items, tree_sender):
            # Suppose we want to process only menu child items.
            if tree_sender == 'menu.children':
                # Lets add 'Hooked: ' to resolved titles of every item.
                for item in tree_items:
                    item.title_resolved = 'Hooked: %s' % item.title_resolved
            # Return items list mutated or not.
            return tree_items

        # And we register items processor.
        register_items_hook(my_items_processor)

    :param func:
    """
    global _ITEMS_PROCESSOR
    _ITEMS_PROCESSOR = func
def register_i18n_trees(aliases):
    """Marks the given sitetree aliases as internationalized.

    An internationalized tree is a generic tree (e.g. ``my_tree``) that may be
    shadowed by locale-specific twins whose aliases carry a locale suffix
    (``my_tree_en``, ``my_tree_ru``, any ``my_tree_{locale-id}``). When such a
    twin exists for the currently active project locale it is used in place of
    the generic tree; otherwise the generic tree is the fallback.

    Example::

        # Put the following code somewhere where it'd be triggered as expected. E.g. in main urls.py.

        # First import the register function.
        from sitetree.sitetreeapp import register_i18n_trees

        # At last we register i18n trees.
        register_i18n_trees(['my_tree', 'my_another_tree'])

    :param aliases:
    """
    global _I18N_TREES
    _I18N_TREES = aliases
def register_dynamic_trees(trees, *args, **kwargs):
    """Registers dynamic trees to be available for `sitetree` runtime.
    Expects `trees` to be an iterable with structures created with `compose_dynamic_tree()`.
    Example::
        register_dynamic_trees(
            # Get all the trees from `my_app`.
            compose_dynamic_tree('my_app'),
            # Get all the trees from `my_app` and attach them to `main` tree root.
            compose_dynamic_tree('my_app', target_tree_alias='main'),
            # Get all the trees from `my_app` and attach them to `has_dynamic` aliased item in `main` tree.
            compose_dynamic_tree('articles', target_tree_alias='main', parent_tree_item_alias='has_dynamic'),
            # Define a tree right on the registration.
            compose_dynamic_tree((
                tree('dynamic', items=(
                    item('dynamic_1', 'dynamic_1_url', children=(
                        item('dynamic_1_sub_1', 'dynamic_1_sub_1_url'),
                    )),
                    item('dynamic_2', 'dynamic_2_url'),
                )),
            )),
        )
    Accepted kwargs:
        bool reset_cache: Resets tree cache, to introduce all changes made to a tree immediately.
    """
    global _DYNAMIC_TREES
    if _IDX_ORPHAN_TREES not in _DYNAMIC_TREES:
        _DYNAMIC_TREES[_IDX_ORPHAN_TREES] = {}
    # Single `compose_dynamic_tree()` result passed directly (not wrapped in a list).
    if isinstance(trees, dict):  # New `less-brackets` style registration.
        trees = [trees]
    trees.extend(args)
    for tree in trees:
        if tree is not None and tree['sitetrees'] is not None:
            if tree['tree'] is None:
                # Register trees as they are defined in app.
                # Orphans are keyed by their own alias; several apps may
                # contribute trees under the same alias, hence the list.
                for st in tree['sitetrees']:
                    if st.alias not in _DYNAMIC_TREES[_IDX_ORPHAN_TREES]:
                        _DYNAMIC_TREES[_IDX_ORPHAN_TREES][st.alias] = []
                    _DYNAMIC_TREES[_IDX_ORPHAN_TREES][st.alias].append(st)
            else:
                # Register tree items as parts of existing trees.
                # Keyed by '<target tree>|:|<parent item alias>' (see _IDX_TPL).
                index = _IDX_TPL % (tree['tree'], tree['parent_item'])
                if index not in _DYNAMIC_TREES:
                    _DYNAMIC_TREES[index] = []
                _DYNAMIC_TREES[index].extend(tree['sitetrees'])
    reset_cache = kwargs.get('reset_cache', False)
    if reset_cache:
        # Empty drops this process' cache; reset flags other processes to rebuild.
        cache_ = get_sitetree().cache
        cache_.empty()
        cache_.reset()
def get_dynamic_trees():
    """Gives back the module-level registry of dynamically registered trees.

    :rtype: dict
    """
    return _DYNAMIC_TREES
def compose_dynamic_tree(src, target_tree_alias=None, parent_tree_item_alias=None, include_trees=None):
    """Returns a structure describing a dynamic sitetree.utils
    The structure can be built from various sources,
    :param str|iterable src: If a string is passed to `src`, it'll be treated as the name of an app,
        from where one want to import sitetrees definitions. `src` can be an iterable
        of tree definitions (see `sitetree.toolbox.tree()` and `item()` functions).
    :param str|unicode target_tree_alias: Static tree alias to attach items from dynamic trees to.
    :param str|unicode parent_tree_item_alias: Tree item alias from a static tree to attach items from dynamic trees to.
    :param list include_trees: Sitetree aliases to filter `src`.
    :rtype: dict
    """
    def result(sitetrees=src):
        # Package the (optionally filtered) tree definitions together with
        # the attachment target into the dict register_dynamic_trees() expects.
        if include_trees is not None:
            sitetrees = [tree for tree in sitetrees if tree.alias in include_trees]
        return {
            'app': src,
            'sitetrees': sitetrees,
            'tree': target_tree_alias,
            'parent_item': parent_tree_item_alias}
    if isinstance(src, six.string_types):
        # Considered to be an application name.
        try:
            module = import_app_sitetree_module(src)
            # An app without a sitetree module yields None rather than an error.
            return None if module is None else result(getattr(module, 'sitetrees', None))
        except ImportError as e:
            if settings.DEBUG:
                warnings.warn('Unable to register dynamic sitetree(s) for `%s` application: %s. ' % (src, e))
            return None
    return result()
@python_2_unicode_compatible
class LazyTitle(object):
    """Lazily resolves any variable found in a title of an item.

    Resolution happens on string conversion: the raw title is tokenized with
    the template lexer, tokens that cannot be rendered standalone (blocks,
    comments) are dropped, and the remaining text/variable tokens are rendered
    against the current page context. Produces resolved title as unicode
    representation.
    """

    def __init__(self, title):
        """
        :param str|unicode title: Raw title, possibly containing template variables.
        """
        self.title = title

    def __str__(self):
        my_lexer = get_lexer(self.title)
        my_tokens = my_lexer.tokenize()
        # Deliberately strip off template tokens that are not text or variable.
        # Fixed: the previous `.remove()` inside a `for` loop mutated the list
        # while iterating, skipping the token following each removed one, so
        # consecutive non-renderable tokens could leak through to the parser.
        my_tokens = [token for token in my_tokens if token.token_type in (TOKEN_TEXT, TOKEN_VAR)]
        my_parser = Parser(my_tokens)
        return my_parser.parse().render(get_sitetree().current_page_context)

    def __eq__(self, other):
        return self.__str__() == other
class Cache(object):
    """Contains cache-related stuff.
    Maintains a local in-memory copy of sitetree data, backed by the Django
    cache under the 'sitetrees' key. Model signals invalidate it automatically.
    """
    def __init__(self):
        # Local cache dict; populated from Django cache by init().
        self.cache = None
        cache_empty = self.empty
        # Listen for signals from the models.
        signals.post_save.connect(cache_empty, sender=MODEL_TREE_CLASS)
        signals.post_save.connect(cache_empty, sender=MODEL_TREE_ITEM_CLASS)
        signals.post_delete.connect(cache_empty, sender=MODEL_TREE_ITEM_CLASS)
        # Listen to the changes in item permissions table.
        signals.m2m_changed.connect(cache_empty, sender=MODEL_TREE_ITEM_CLASS.access_permissions)
        self.init()
    @classmethod
    def reset(cls):
        """Instructs sitetree to drop and recreate cache.
        Could be used to show up tree changes made in a different process.
        """
        # Flag only; each process rebuilds on its next init() call.
        cache.set('sitetrees_reset', True)
    def init(self):
        """Initializes local cache from Django cache."""
        # Drop cache flag set by .reset() method.
        cache.get('sitetrees_reset') and self.empty(init=False)
        self.cache = cache.get(
            'sitetrees', {'sitetrees': {}, 'parents': {}, 'items_by_ids': {}, 'tree_aliases': {}})
    def save(self):
        """Saves sitetree data to Django cache."""
        cache.set('sitetrees', self.cache, CACHE_TIMEOUT)
    def empty(self, **kwargs):
        """Empties cached sitetree data.
        Accepts ``init=False`` to skip re-initialization afterwards
        (also swallows signal kwargs when used as a signal handler).
        """
        cache.delete('sitetrees')
        cache.delete('sitetrees_reset')
        kwargs.get('init', True) and self.init()
    def get_entry(self, entry_name, key):
        """Returns cache entry parameter value by its name.
        Returns ``False`` when the key is missing.
        :param str|unicode entry_name:
        :param key:
        :return:
        """
        return self.cache[entry_name].get(key, False)
    def update_entry_value(self, entry_name, key, value):
        """Updates cache entry parameter with new data.
        :param str|unicode entry_name:
        :param key:
        :param value:
        """
        if key not in self.cache[entry_name]:
            self.cache[entry_name][key] = {}
        self.cache[entry_name][key].update(value)
    def set_entry(self, entry_name, key, value):
        """Replaces entire cache entry parameter data by its name with new data.
        :param str|unicode entry_name:
        :param key:
        :param value:
        """
        self.cache[entry_name][key] = value
class SiteTree(object):
"""Main logic handler."""
    def __init__(self):
        # A fresh instance starts with no request context bound.
        self.init(context=None)
    def init(self, context):
        """Initializes sitetree to handle new request.
        Re-run for every new request (see init_tree()) to drop per-request state.
        :param Context|None context:
        """
        self.cache = Cache()
        self.current_page_context = context
        self.current_request = context.get('request', None) if context else None
        self.current_lang = get_language()
        # Lazily computed flags/caches; _UNSET distinguishes "not computed yet".
        self._current_app_is_admin = None
        self._current_user_permissions = _UNSET
        self._items_urls = {}  # Resolved urls are cache for a request.
        self._current_items = {}
def resolve_tree_i18n_alias(self, alias):
"""Resolves internationalized tree alias.
Verifies whether a separate sitetree is available for currently active language.
If so, returns i18n alias. If not, returns the initial alias.
:param str|unicode alias:
:rtype: str|unicode
"""
if alias not in _I18N_TREES:
return alias
current_language_code = self.current_lang
i18n_tree_alias = '%s_%s' % (alias, current_language_code)
trees_count = self.cache.get_entry('tree_aliases', i18n_tree_alias)
if trees_count is False:
trees_count = MODEL_TREE_CLASS.objects.filter(alias=i18n_tree_alias).count()
self.cache.set_entry('tree_aliases', i18n_tree_alias, trees_count)
if trees_count:
alias = i18n_tree_alias
return alias
    @staticmethod
    def attach_dynamic_tree_items(tree_alias, src_tree_items):
        """Attaches dynamic sitetrees items registered with `register_dynamic_trees()`
        to an initial (source) items list.
        :param str|unicode tree_alias:
        :param list src_tree_items:
        :rtype: list
        """
        if not _DYNAMIC_TREES:
            return src_tree_items
        # This guarantees that a dynamic source stays intact,
        # no matter how dynamic sitetrees are attached.
        trees = deepcopy(_DYNAMIC_TREES)
        items = []
        if not src_tree_items:
            # No static items at all: serve orphaned dynamic trees registered
            # under this alias, if any.
            if _IDX_ORPHAN_TREES in trees and tree_alias in trees[_IDX_ORPHAN_TREES]:
                for tree in trees[_IDX_ORPHAN_TREES][tree_alias]:
                    items.extend(tree.dynamic_items)
        else:
            # TODO Seems to be underoptimized %)
            # Tree item attachment by alias.
            for static_item in list(src_tree_items):
                items.append(static_item)
                if not static_item.alias:
                    continue
                # Dynamic trees attached to this particular static item
                # (registered under '<tree alias>|:|<item alias>').
                idx = _IDX_TPL % (tree_alias, static_item.alias)
                if idx not in trees:
                    continue
                for tree in trees[idx]:
                    tree.alias = tree_alias
                    for dyn_item in tree.dynamic_items:
                        if dyn_item.parent is None:
                            dyn_item.parent = static_item
                        # Unique IDs are required for the same trees attached
                        # to different parents.
                        dyn_item.id = generate_id_for(dyn_item)
                        items.append(dyn_item)
            # Tree root attachment.
            # NOTE(review): _DYNAMIC_TREES is deep-copied a second time here,
            # presumably so root-attached items are pristine too -- confirm.
            idx = _IDX_TPL % (tree_alias, None)
            if idx in _DYNAMIC_TREES:
                trees = deepcopy(_DYNAMIC_TREES)
                for tree in trees[idx]:
                    tree.alias = tree_alias
                    items.extend(tree.dynamic_items)
        return items
    def current_app_is_admin(self):
        """Returns boolean whether current application is Admin contrib.
        The result is computed once per request and memoized.
        :rtype: bool
        """
        is_admin = self._current_app_is_admin
        if is_admin is None:
            context = self.current_page_context
            # App name is looked up in three places, most specific first.
            current_app = getattr(
                # Try from request.resolver_match.app_name
                getattr(context.get('request', None), 'resolver_match', None), 'app_name',
                # Try from global context obj.
                getattr(context, 'current_app', None))
            if current_app is None:  # Try from global context dict.
                current_app = context.get('current_app', '')
            is_admin = current_app == 'admin'
            self._current_app_is_admin = is_admin
        return is_admin
    def get_sitetree(self, alias):
        """Gets site tree items from the given site tree.
        Caches result to dictionary.
        Returns (tree alias, tree items) tuple.
        On a cache miss the tree is fetched from the DB, merged with dynamic
        items, annotated (depth, permissions, resolved URL/title) and cached.
        :param str|unicode alias:
        :rtype: tuple
        """
        cache_ = self.cache
        get_cache_entry = cache_.get_entry
        set_cache_entry = cache_.set_entry
        caching_required = False
        if not self.current_app_is_admin():
            # We do not need i18n for a tree rendered in Admin dropdown.
            alias = self.resolve_tree_i18n_alias(alias)
        sitetree = get_cache_entry('sitetrees', alias)
        if not sitetree:
            sitetree = (
                MODEL_TREE_ITEM_CLASS.objects.
                select_related('parent', 'tree').
                prefetch_related('access_permissions__content_type').
                filter(tree__alias__exact=alias).
                order_by('parent__sort_order', 'sort_order'))
            sitetree = self.attach_dynamic_tree_items(alias, sitetree)
            set_cache_entry('sitetrees', alias, sitetree)
            caching_required = True
        parents = get_cache_entry('parents', alias)
        if not parents:
            # Map: parent item -> list of direct children.
            parents = defaultdict(list)
            for item in sitetree:
                parent = getattr(item, 'parent')
                parents[parent].append(item)
            set_cache_entry('parents', alias, parents)
        # Prepare items by ids cache if needed.
        if caching_required:
            # We need this extra pass to avoid future problems on items depth calculation.
            for item in sitetree:
                cache_.update_entry_value('items_by_ids', alias, {item.id: item})
        for item in sitetree:
            if caching_required:
                item.has_children = False
                if not hasattr(item, 'depth'):
                    item.depth = self.calculate_item_depth(alias, item.id)
                item.depth_range = range(item.depth)
                # Resolve item permissions.
                if item.access_restricted:
                    # Dynamic items carry their permissions inline; DB items use m2m.
                    permissions_src = (
                        item.permissions if getattr(item, 'is_dynamic', False)
                        else item.access_permissions.all())
                    item.perms = set(
                        ['%s.%s' % (perm.content_type.app_label, perm.codename) for perm in permissions_src])
            # Contextual properties.
            # These are refreshed on every call since they depend on the request.
            item.url_resolved = self.url(item)
            item.title_resolved = LazyTitle(item.title) if VARIABLE_TAG_START in item.title else item.title
            item.is_current = False
            item.in_current_branch = False
        # Get current item for the given sitetree.
        self.get_tree_current_item(alias)
        # Save sitetree data into cache if needed.
        if caching_required:
            cache_.save()
        return alias, sitetree
def calculate_item_depth(self, tree_alias, item_id, depth=0):
"""Calculates depth of the item in the tree.
:param str|unicode tree_alias:
:param int item_id:
:param int depth:
:rtype: int
"""
item = self.get_item_by_id(tree_alias, item_id)
if hasattr(item, 'depth'):
depth = item.depth + depth
else:
if item.parent is not None:
depth = self.calculate_item_depth(tree_alias, item.parent.id, depth + 1)
return depth
def get_item_by_id(self, tree_alias, item_id):
"""Get the item from the tree by its ID.
:param str|unicode tree_alias:
:param int item_id:
:rtype: TreeItemBase
"""
return self.cache.get_entry('items_by_ids', tree_alias)[item_id]
    def get_tree_current_item(self, tree_alias):
        """Resolves current tree item of 'tree_alias' tree matching current
        request path against URL of given tree item.
        Result (possibly None) is memoized per tree for the request; as a side
        effect, `is_current` is set on every item whose URL matches.
        :param str|unicode tree_alias:
        :rtype: TreeItemBase
        """
        current_item = self._current_items.get(tree_alias, _UNSET)
        if current_item is not _UNSET:
            if current_item is not None:
                current_item.is_current = True  # Could be reset by .get_sitetree()
            return current_item
        current_item = None
        if self.current_app_is_admin():
            # Admin pages never match a sitetree item.
            self._current_items[tree_alias] = current_item
            return None
        # urlquote is an attempt to support non-ascii in url.
        current_url = self.current_request.path
        if isinstance(current_url, str):
            current_url = current_url.encode('UTF-8')
        if current_url:
            current_url = urlquote(current_url)
        for url_item, url in self._items_urls.items():
            # Iterate each as this dict may contains "current" items for various trees.
            if url != current_url:
                continue
            url_item.is_current = True
            if url_item.tree.alias == tree_alias:
                current_item = url_item
        if current_item is not None:
            self._current_items[tree_alias] = current_item
        return current_item
    def url(self, sitetree_item, context=None):
        """Resolves item's URL.
        Plain URLs are passed through; items flagged `urlaspattern` are fed
        through Django's `url` template tag (with item-level arguments resolved
        from the context). Results are memoized per request in _items_urls.
        :param TreeItemBase sitetree_item: TreeItemBase heir object, 'url' property of which
            is processed as URL pattern or simple URL.
        :param Context context:
        :rtype: str|unicode
        """
        context = context or self.current_page_context
        if not isinstance(sitetree_item, MODEL_TREE_ITEM_CLASS):
            sitetree_item = self.resolve_var(sitetree_item, context)
        resolved_url = self._items_urls.get(sitetree_item)
        if resolved_url is not None:
            return resolved_url
        # Resolve only if item's URL is marked as pattern.
        if sitetree_item.urlaspattern:
            url = sitetree_item.url
            view_path = url
            all_arguments = []
            if ' ' in url:
                # Format is '<view name> <arg1> <arg2> ...'.
                view_path = url.split(' ')
                # We should try to resolve URL parameters from site tree item.
                for view_argument in view_path[1:]:
                    resolved = self.resolve_var(view_argument)
                    # In case of non-ascii data we leave variable unresolved.
                    if isinstance(resolved, six.text_type):
                        if resolved.encode('ascii', 'ignore').decode('ascii') != resolved:
                            resolved = view_argument
                    # We enclose arg in double quotes as already resolved.
                    all_arguments.append('"%s"' % resolved)
                view_path = view_path[0].strip('"\' ')
            url_pattern = "'%s' %s" % (view_path, ' '.join(all_arguments))
        else:
            url_pattern = '%s' % sitetree_item.url
        if sitetree_item.urlaspattern:
            # Form token to pass to Django 'url' tag.
            url_token = 'url %s as item.url_resolved' % url_pattern
            url_tag(
                Parser(None),
                Token(token_type=TOKEN_BLOCK, contents=url_token)
            ).render(context)
            # UNRESOLVED_ITEM_MARKER flags patterns the URL resolver rejected.
            resolved_url = context['item.url_resolved'] or UNRESOLVED_ITEM_MARKER
        else:
            resolved_url = url_pattern
        self._items_urls[sitetree_item] = resolved_url
        return resolved_url
    def init_tree(self, tree_alias, context):
        """Initializes sitetree in memory.
        Returns tuple with resolved tree alias and items on success.
        On fail returns (None, None).
        :param str|unicode tree_alias:
        :param Context context:
        :rtype: tuple
        """
        request = context.get('request', None)
        if request is None:
            raise SiteTreeError(
                'Sitetree requires "django.core.context_processors.request" template context processor to be active. '
                'If it is, check that your view pushes request data into the template.')
        # A new request object means a new request: reset per-request state.
        if id(request) != id(self.current_request):
            self.init(context)
        # Resolve tree_alias from the context.
        tree_alias = self.resolve_var(tree_alias)
        tree_alias, sitetree_items = self.get_sitetree(tree_alias)
        if not sitetree_items:
            return None, None
        return tree_alias, sitetree_items
def get_current_page_title(self, tree_alias, context):
"""Returns resolved from sitetree title for current page.
:param str|unicode tree_alias:
:param Context context:
:rtype: str|unicode
"""
return self.get_current_page_attr('title_resolved', tree_alias, context)
    def get_current_page_attr(self, attr_name, tree_alias, context):
        """Returns an arbitrary attribute of a sitetree item resolved as current for current page.
        Returns an empty string when no current item can be resolved (raises
        instead when DEBUG and RAISE_ITEMS_ERRORS_ON_DEBUG are both set).
        :param str|unicode attr_name:
        :param str|unicode tree_alias:
        :param Context context:
        :rtype: str|unicode
        """
        tree_alias, sitetree_items = self.init_tree(tree_alias, context)
        current_item = self.get_tree_current_item(tree_alias)
        if current_item is None:
            if settings.DEBUG and RAISE_ITEMS_ERRORS_ON_DEBUG:
                raise SiteTreeError(
                    'Unable to resolve current sitetree item to get a `%s` for current page. Check whether '
                    'there is an appropriate sitetree item defined for current URL.' % attr_name)
            return ''
        return getattr(current_item, attr_name, '')
def get_ancestor_level(self, current_item, depth=1):
"""Returns ancestor of level `deep` recursively
:param TreeItemBase current_item:
:param int depth:
:rtype: TreeItemBase
"""
if current_item.parent is None:
return current_item
if depth <= 1:
return current_item.parent
return self.get_ancestor_level(current_item.parent, depth=depth-1)
    def menu(self, tree_alias, tree_branches, context):
        """Builds and returns menu structure for 'sitetree_menu' tag.
        `tree_branches` is a comma-separated list of branch selectors: item
        IDs, item aliases, or the special ALIAS_* keywords (trunk, this-children,
        this-siblings, this-ancestor-children, this-parent-siblings).
        Returns '' when the tree has no items.
        :param str|unicode tree_alias:
        :param str|unicode tree_branches:
        :param Context context:
        :rtype: list|str
        """
        tree_alias, sitetree_items = self.init_tree(tree_alias, context)
        if not sitetree_items:
            return ''
        tree_branches = self.resolve_var(tree_branches)
        parent_isnull = False
        parent_ids = []
        parent_aliases = []
        current_item = self.get_tree_current_item(tree_alias)
        # Mark the current branch (in_current_branch flags) up the tree.
        self.tree_climber(tree_alias, current_item)
        # Support item addressing both through identifiers and aliases.
        for branch_id in tree_branches.split(','):
            branch_id = branch_id.strip()
            if branch_id == ALIAS_TRUNK:
                parent_isnull = True
            elif branch_id == ALIAS_THIS_CHILDREN and current_item is not None:
                branch_id = current_item.id
                parent_ids.append(branch_id)
            elif branch_id == ALIAS_THIS_ANCESTOR_CHILDREN and current_item is not None:
                branch_id = self.get_ancestor_item(tree_alias, current_item).id
                parent_ids.append(branch_id)
            elif branch_id == ALIAS_THIS_SIBLINGS and current_item is not None and current_item.parent is not None:
                branch_id = current_item.parent.id
                parent_ids.append(branch_id)
            elif branch_id == ALIAS_THIS_PARENT_SIBLINGS and current_item is not None:
                branch_id = self.get_ancestor_level(current_item, depth=2).id
                parent_ids.append(branch_id)
            elif branch_id.isdigit():
                parent_ids.append(int(branch_id))
            else:
                parent_aliases.append(branch_id)
        menu_items = []
        # Keep visible, menu-enabled, permitted items whose parent matches
        # one of the selected branches.
        for item in sitetree_items:
            if not item.hidden and item.inmenu and self.check_access(item, context):
                if item.parent is None:
                    if parent_isnull:
                        menu_items.append(item)
                else:
                    if item.parent.id in parent_ids or item.parent.alias in parent_aliases:
                        menu_items.append(item)
        menu_items = self.apply_hook(menu_items, 'menu')
        self.update_has_children(tree_alias, menu_items, 'menu')
        return menu_items
@classmethod
def apply_hook(cls, items, sender):
    """Applies item processing hook, registered with ``register_item_hook()``
    to items supplied, and returns processed list.

    Returns initial items list if no hook is registered.

    :param list items:
    :param str|unicode sender: menu, breadcrumbs, sitetree, {type}.children, {type}.has_children
    :rtype: list
    """
    if _ITEMS_PROCESSOR is None:
        # No module-level hook registered: pass items through untouched.
        return items
    return _ITEMS_PROCESSOR(tree_items=items, tree_sender=sender)
def check_access(self, item, context):
    """Checks whether a current user has an access to a certain item.

    :param TreeItemBase item:
    :param Context context:
    :rtype: bool
    """
    # ``User.is_authenticated`` may be a method (older Django) or a
    # property (newer Django) — support both forms.
    if hasattr(self.current_request.user.is_authenticated, '__call__'):
        authenticated = self.current_request.user.is_authenticated()
    else:
        authenticated = self.current_request.user.is_authenticated
    if item.access_loggedin and not authenticated:
        return False
    if item.access_guest and authenticated:
        return False
    if item.access_restricted:
        # Permissions are fetched once and memoized on the instance.
        user_perms = self._current_user_permissions
        if user_perms is _UNSET:
            user_perms = set(context['user'].get_all_permissions())
            self._current_user_permissions = user_perms
        if item.access_perm_type == MODEL_TREE_ITEM_CLASS.PERM_TYPE_ALL:
            # 'All' mode: every permission attached to the item is required.
            if len(item.perms) != len(item.perms.intersection(user_perms)):
                return False
        else:
            # 'Any' mode: one shared permission is enough.
            if not len(item.perms.intersection(user_perms)):
                return False
    return True
def breadcrumbs(self, tree_alias, context):
    """Builds and returns breadcrumb trail structure for 'sitetree_breadcrumbs' tag.

    :param str|unicode tree_alias:
    :param Context context:
    :rtype: list|str
    """
    tree_alias, sitetree_items = self.init_tree(tree_alias, context)
    if not sitetree_items:
        return ''
    current_item = self.get_tree_current_item(tree_alias)
    breadcrumbs = []
    if current_item is not None:
        # Bind to locals once; ``climb`` is recursive.
        context_ = self.current_page_context
        check_access = self.check_access
        get_item_by_id = self.get_item_by_id
        def climb(base_item):
            """Climbs up the site tree to build breadcrumb path.

            :param TreeItemBase base_item:
            """
            if base_item.inbreadcrumbs and not base_item.hidden and check_access(base_item, context_):
                breadcrumbs.append(base_item)
            if hasattr(base_item, 'parent') and base_item.parent is not None:
                climb(get_item_by_id(tree_alias, base_item.parent.id))
        climb(current_item)
        # Items were collected leaf-first; present them root-first.
        breadcrumbs.reverse()
    items = self.apply_hook(breadcrumbs, 'breadcrumbs')
    self.update_has_children(tree_alias, items, 'breadcrumbs')
    return items
def tree(self, tree_alias, context):
    """Builds and returns tree structure for 'sitetree_tree' tag.

    :param str|unicode tree_alias:
    :param Context context:
    :rtype: list|str
    """
    tree_alias, sitetree_items = self.init_tree(tree_alias, context)
    if not sitetree_items:
        return ''
    # Top-level items are the children of the None parent.
    tree_items = self.filter_items(self.get_children(tree_alias, None), 'sitetree')
    tree_items = self.apply_hook(tree_items, 'sitetree')
    self.update_has_children(tree_alias, tree_items, 'sitetree')
    return tree_items
def children(self, parent_item, navigation_type, use_template, context):
    """Builds and returns site tree item children structure for 'sitetree_children' tag.

    NOTE(review): despite the ':rtype: list' below, this returns the
    rendered template string.

    :param TreeItemBase parent_item:
    :param str|unicode navigation_type: menu, sitetree
    :param str|unicode use_template:
    :param Context context:
    :rtype: list
    """
    # Resolve parent item and current tree alias.
    parent_item = self.resolve_var(parent_item, context)
    tree_alias, tree_items = self.get_sitetree(parent_item.tree.alias)
    # Mark path to current item.
    self.tree_climber(tree_alias, self.get_tree_current_item(tree_alias))
    tree_items = self.get_children(tree_alias, parent_item)
    tree_items = self.filter_items(tree_items, navigation_type)
    tree_items = self.apply_hook(tree_items, '%s.children' % navigation_type)
    self.update_has_children(tree_alias, tree_items, navigation_type)
    my_template = get_template(use_template)
    # Render the children with the chosen template in a pushed context.
    context.push()
    context['sitetree_items'] = tree_items
    rendered = my_template.render(context.flatten() if _CONTEXT_FLATTEN else context)
    context.pop()
    return rendered
def get_children(self, tree_alias, item):
    """Returns item's children.

    :param str|unicode tree_alias:
    :param TreeItemBase|None item: ``None`` selects top-level items
    :rtype: list
    """
    if not self.current_app_is_admin():
        # We do not need i18n for a tree rendered in Admin dropdown.
        tree_alias = self.resolve_tree_i18n_alias(tree_alias)
    # Cache maps each parent item to the list of its children.
    return self.cache.get_entry('parents', tree_alias)[item]
def update_has_children(self, tree_alias, tree_items, navigation_type):
    """Updates 'has_children' attribute for tree items inplace.

    :param str|unicode tree_alias:
    :param list tree_items:
    :param str|unicode navigation_type: sitetree, breadcrumbs, menu
    """
    for tree_item in tree_items:
        children = self.get_children(tree_alias, tree_item)
        children = self.filter_items(children, navigation_type)
        children = self.apply_hook(children, '%s.has_children' % navigation_type)
        # An item "has children" only after filtering and hooks.
        tree_item.has_children = len(children) > 0
def filter_items(self, items, navigation_type=None):
    """Filters sitetree item's children if hidden and by navigation type.

    NB: We do not apply any filters to sitetree in admin app.

    :param list items:
    :param str|unicode navigation_type: sitetree, breadcrumbs, menu
    :rtype: list
    """
    if self.current_app_is_admin():
        return items
    items_filtered = []
    context = self.current_page_context
    check_access = self.check_access
    for item in items:
        if item.hidden:
            continue
        if not check_access(item, context):
            continue
        # Items missing the 'in<type>' attribute default to visible.
        if not getattr(item, 'in%s' % navigation_type, True):  # Hidden for current nav type
            continue
        items_filtered.append(item)
    return items_filtered
def get_ancestor_item(self, tree_alias, base_item):
    """Climbs up the site tree to resolve root item for chosen one.

    :param str|unicode tree_alias:
    :param TreeItemBase base_item:
    :rtype: TreeItemBase
    """
    parent = None
    if hasattr(base_item, 'parent') and base_item.parent is not None:
        # Recurse until an item with no parent (the root) is reached.
        parent = self.get_ancestor_item(tree_alias, self.get_item_by_id(tree_alias, base_item.parent.id))
    if parent is None:
        return base_item
    return parent
def tree_climber(self, tree_alias, base_item):
    """Climbs up the site tree to mark items of current branch.

    Sets ``in_current_branch`` on every ancestor of ``base_item``.

    :param str|unicode tree_alias:
    :param TreeItemBase base_item:
    """
    if base_item is not None:
        base_item.in_current_branch = True
        if hasattr(base_item, 'parent') and base_item.parent is not None:
            self.tree_climber(tree_alias, self.get_item_by_id(tree_alias, base_item.parent.id))
def resolve_var(self, varname, context=None):
    """Resolves name as a variable in a given context.

    If no context is specified the current page context is used.

    :param str|unicode|FilterExpression varname: name to resolve
    :param Context context:
    :return: resolved value, or the stripped name itself when it does
        not resolve to a context variable
    """
    context = context or self.current_page_context
    if isinstance(varname, FilterExpression):
        varname = varname.resolve(context)
    else:
        varname = varname.strip()
        try:
            varname = Variable(varname).resolve(context)
        except VariableDoesNotExist:
            # Not a context variable: keep the literal (stripped) string.
            pass
    return varname
| [
"5971158@qq.com"
] | 5971158@qq.com |
78b824aa0d72d6431c4d74492b5aee080e1290b6 | b005d794cfd8e3b063b08d6a266b1e07f0f0f5e9 | /src/webapp/appengine_admin/main.py | 250c885c61e10ae0f5ddfa5bbc954edd335fff42 | [] | no_license | GeoRemindMe/GeoRemindMe_Web | 593c957faa5babb3040da86d94a5d884ad4b2db3 | d441693eedb32c36fe853895110df808a9959941 | refs/heads/master | 2021-01-16T18:29:39.633445 | 2011-11-05T23:50:37 | 2011-11-05T23:50:37 | 1,841,418 | 8 | 5 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | import os
# Google App Engine admin entry point: configures Django settings before
# any Django import, then serves the admin views over WSGI.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from google.appengine.dist import use_library
use_library('django', '1.2')
from django.conf import settings
# Touch settings to force lazy settings initialization early.
_ = settings.TEMPLATE_DIRS
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from appengine_admin.views import Admin
from appengine_admin.model_register import ModelAdmin, register
import appengine_admin.models_admin
# Route everything under /admin to the Admin request handler.
application = webapp.WSGIApplication([
    (r'^(/admin)(.*)$', Admin),
])
def main():
    """Run the WSGI application (App Engine CGI entry point)."""
    run_wsgi_app(application)
if __name__ == "__main__":
    main()
"jneight@gmail.com"
] | jneight@gmail.com |
f3e11eff97df0117cee0c81ff107bf314ec74660 | 697e16af90dbba629ea2487be7a630cd3d07a6b7 | /PRICE/APIs/password/responses/set_lockout.py | 56be59c3db4a61dbd6c2b8888ec8780528fc80fb | [] | no_license | rcmhunt71/GeneralRepo | 7a607178531b6d92b6a93e11e9a24e8e3e47e982 | a51b90132bfaac0f5c7e79ae708e81f114f2abb8 | refs/heads/master | 2020-11-27T05:55:17.726026 | 2020-02-18T20:26:14 | 2020-02-18T20:26:14 | 229,328,447 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | from PRICE.base.common.response import CommonResponse
class SetLockout(CommonResponse):
    """Response model for the password set-lockout API.

    Inherits all behavior from CommonResponse unchanged.
    """
    pass
| [
"rcmhunt71@gmail.com"
] | rcmhunt71@gmail.com |
911819b86225ec2196c19e866f1cae7ac2cbe89e | bee9d96912078d68877aa53e0c96537677ec3e6a | /peakpo/ds_powdiff/DiffractionPattern.py | b8b590a3372b5900968724dcec12c537c2c9d524 | [
"Apache-2.0"
] | permissive | SHDShim/PeakPo | ce0a637b6307787dd84fd3dcb3415e752d180c32 | 4c522e147e7715bceba218de58ee185cccd2055e | refs/heads/master | 2022-06-26T11:26:45.097828 | 2022-06-19T22:03:24 | 2022-06-19T22:03:24 | 94,345,216 | 17 | 3 | null | null | null | null | UTF-8 | Python | false | false | 6,673 | py | import numpy as np
import os
import time
import datetime
from utils import writechi, readchi, make_filename
from .background import fit_bg_cheb_auto
class Pattern(object):
    """
    This modified from the same object in ds_* modules

    Holds a raw diffraction pattern (two theta vs intensity) together with
    an optional Chebyshev-fitted background and the background-subtracted
    data, and supports caching those to temporary chi files.
    """
    def __init__(self, filename=None):
        # NOTE(review): when filename is None, self.fname is never set —
        # confirm callers always assign fname before using it.
        if filename is None:
            self.x_raw = None
            self.y_raw = None
        else:
            self.fname = filename
            self.read_file(filename)
        self.x_bgsub = None
        self.y_bgsub = None
        self.x_bg = None
        self.y_bg = None
        # Default parameters handed to fit_bg_cheb_auto — presumably
        # (order, iterations, window); TODO confirm meaning of each entry.
        self.params_chbg = [20, 10, 20]
    def read_file(self, fname):
        """
        read a chi file and get raw xy
        """
        # NOTE(review): only '.chi' is actually parsed here despite the
        # error message mentioning MSA and EDS formats.
        if fname.endswith('.chi'):
            data = np.loadtxt(fname, skiprows=4)
            twotheta, intensity = data.T
        else:
            raise ValueError('Only support CHI, MSA, and EDS formats')
        # set file name information
        self.fname = fname
        self.x_raw = twotheta
        self.y_raw = intensity
    def _get_section(self, x, y, roi):
        # Return the (x, y) sub-arrays falling inside roi = (min, max);
        # falls back to the full arrays when roi exceeds the data range.
        if roi[0] >= x.min() and roi[1] <= x.max():
            i_roimin = np.abs(x - roi[0]).argmin()
            i_roimax = np.abs(x - roi[1]).argmin()
            x_roi = x[i_roimin:i_roimax]
            y_roi = y[i_roimin:i_roimax]
            return x_roi, y_roi
        else:
            print(str(datetime.datetime.now())[:-7],
                  ": Error: ROI should be smaller than the data range")
            return x, y
    def get_section(self, roi, bgsub=True):
        """
        return a section for viewing and processing
        """
        if bgsub:
            return self._get_section(self.x_bgsub, self.y_bgsub, roi)
        else:
            return self._get_section(self.x_raw, self.y_raw, roi)
    def _get_bg(self, roi, params=None, yshift=0.):
        # Fit a Chebyshev background over the roi and store both the
        # background and the background-subtracted curve on the instance.
        if params is not None:
            self.params_chbg = params
        x, y = self._get_section(self.x_raw, self.y_raw, roi)
        t_start = time.time()
        y_bg = fit_bg_cheb_auto(x, y, self.params_chbg[0],
                                self.params_chbg[1], self.params_chbg[2])
        print(str(datetime.datetime.now())[:-7],
              ": Bgsub takes {0:.2f}s".format(time.time() - t_start))
        self.x_bg = x
        self.x_bgsub = x
        y_bgsub = y - y_bg
        """
        if y_bgsub.min() <= 0:
            y_bgsub = y_bgsub - y_bgsub.min() + yshift
        """
        self.y_bgsub = y_bgsub
        self.y_bg = y_bg
        self.roi = roi
    def subtract_bg(self, roi, params=None, yshift=10.):
        """Fit and subtract background over roi (wrapper around _get_bg)."""
        print(str(datetime.datetime.now())[:-7], ": Receive BG subtraction")
        self._get_bg(roi, params=params, yshift=yshift)
    def get_raw(self):
        """Return (x, y) of the raw pattern."""
        return self.x_raw, self.y_raw
    def get_background(self):
        """Return (x, y) of the fitted background (alias of get_bg)."""
        return self.x_bg, self.y_bg
    def get_bgsub(self):
        """Return (x, y) of the background-subtracted pattern."""
        return self.x_bgsub, self.y_bgsub
    def get_bg(self):
        """Return (x, y) of the fitted background."""
        return self.x_bg, self.y_bg
    def set_bg(self, x_bg, y_bg, x_bgsub, y_bgsub, roi, bg_params):
        """Install externally computed background data on this instance."""
        self.x_bg = x_bg
        self.y_bg = y_bg
        self.x_bgsub = x_bgsub
        self.y_bgsub = y_bgsub
        self.roi = roi
        self.params_chbg = bg_params
    def get_chbg(self, roi, params=None, chiout=False, yshift=10.):
        """
        subtract background from raw data for a roi and then store in
        chbg xy
        """
        self._get_bg(roi, params=params, yshift=yshift)
        if chiout:
            # write background file
            f_bg = os.path.splitext(self.fname)[0] + '.bg.chi'
            text = "Background\n" + "2-theta, CHEB BG:" + \
                ' '.join(map(str, self.params_chbg)) + "\n\n"
            writechi(f_bg, self.x_bgsub, self.y_bg, preheader=text)
            # write background subtracted file
            f_bgsub = os.path.splitext(self.fname)[0] + '.bgsub.chi'
            text = "Background subtracted diffraction pattern\n" + \
                "2-theta, CHEB BG:" + ' '.join(map(str, self.params_chbg)) + "\n\n"
            writechi(f_bgsub, self.x_bgsub, self.y_bgsub, preheader=text)
    def read_bg_from_tempfile(self, temp_dir=None):
        """Load cached background files; return True on success."""
        bgsub_filen, bg_filen = self.make_temp_filenames(temp_dir=temp_dir)
        if os.path.exists(bgsub_filen) and os.path.exists(bg_filen):
            roi, bg_params, x_bgsub, y_bgsub = readchi(bgsub_filen)
            __, __, x_bg, y_bg = readchi(bg_filen)
            self.set_bg(x_bg, y_bg, x_bgsub, y_bgsub, roi, bg_params)
            return True
        else:
            return False
    def make_temp_filenames(self, temp_dir=None):
        """Return (bgsub, bg) cache file paths; creates temp_dir if needed."""
        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)
        bgsub_filen = make_filename(self.fname, 'bgsub.chi',
                                    temp_dir=temp_dir)
        bg_filen = make_filename(self.fname, 'bg.chi',
                                 temp_dir=temp_dir)
        return bgsub_filen, bg_filen
    def temp_files_exist(self, temp_dir=None):
        """Return True when both cached background files exist."""
        bgsub_filen, bg_filen = self.make_temp_filenames(temp_dir=temp_dir)
        if os.path.exists(bgsub_filen) and os.path.exists(bg_filen):
            return True
        else:
            return False
    def write_temporary_bgfiles(self, temp_dir):
        """Write current background data to the cache files in temp_dir."""
        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)
        bgsub_filen, bg_filen = self.make_temp_filenames(temp_dir=temp_dir)
        x_bgsub, y_bgsub = self.get_bgsub()
        x_bg, y_bg = self.get_bg()
        # Header records the ROI and fit parameters so they can be
        # restored by read_bg_from_tempfile.
        preheader_line0 = \
            '# BG ROI: {0: .5f}, {1: .5f} \n'.format(self.roi[0], self.roi[1])
        preheader_line1 = \
            '# BG Params: {0: d}, {1: d}, {2: d} \n'.format(
                self.params_chbg[0], self.params_chbg[1], self.params_chbg[2])
        preheader_line2 = '\n'
        writechi(bgsub_filen, x_bgsub, y_bgsub, preheader=preheader_line0 +
                 preheader_line1 + preheader_line2)
        writechi(bg_filen, x_bg, y_bg, preheader=preheader_line0 +
                 preheader_line1 + preheader_line2)
class PatternPeakPo(Pattern):
    '''
    Do not update this.
    Exist only for reading old PPSS files.
    Do not delete this, if so old PPSS cannot be read.
    This is used only for old PPSS file.
    '''
    def __init__(self):
        # NOTE(review): deliberately does not call Pattern.__init__ —
        # instances are populated by unpickling old PPSS files.
        self.color = 'white'
        self.display = False
        self.wavelength = 0.3344
    def get_invDsp(self):
        """Compute 1/d-spacing from two theta via Bragg's law (sin(theta)*2/wavelength)."""
        self.invDsp_raw = np.sin(np.radians(self.x_raw / 2.)) \
            * 2. / self.wavelength
        self.invDsp_bgsub = np.sin(np.radians(self.x_bgsub / 2.)) \
            * 2. / self.wavelength
class AziPatternPeakPo(PatternPeakPo):
    """PatternPeakPo variant that also tracks azimuthal ranges."""
    def __init__(self):
        # List of azimuthal ranges; base-class attributes are not set here.
        self.azi_ranges = []
| [
"SHDShim@gmail.com"
] | SHDShim@gmail.com |
556b4329ac5a4c4bac1ac204e61f90253738d209 | 61ef3a8928caa88e874aa79d46203871d0e5bbfb | /p/dist-packages/awscli/customizations/cloudformation/artifact_exporter.py | 428708b8d3abefd48fdc85f9e59d54e4072aa255 | [] | no_license | joshg111/craigslist_kbb | cd941f237821fa7d2a9a8140b2744294948ba56c | a473d0d389612e53a2ad6fe8a18d984474c44623 | refs/heads/master | 2021-01-10T09:35:04.334601 | 2018-05-26T16:05:20 | 2018-05-26T16:05:20 | 44,120,875 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,641 | py | # Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging
import os
import tempfile
import zipfile
import contextlib
import uuid
from awscli.compat import six
from six.moves.urllib.parse import urlparse, parse_qs
from contextlib import contextmanager
from awscli.customizations.cloudformation import exceptions
from awscli.customizations.cloudformation.yamlhelper import yaml_dump, \
yaml_parse
LOG = logging.getLogger(__name__)
def is_path_value_valid(path):
    """Return True when *path* is a string (str/unicode) value."""
    return isinstance(path, six.string_types)
def make_abs_path(directory, path):
    """Resolve *path* relative to *directory*; pass through absolute
    paths and non-string values unchanged."""
    if is_path_value_valid(path) and not os.path.isabs(path):
        return os.path.normpath(os.path.join(directory, path))
    else:
        return path
def is_s3_url(url):
    """Return True when *url* parses as a valid S3 URL, False otherwise."""
    try:
        parse_s3_url(url)
    except ValueError:
        return False
    return True
def is_local_folder(path):
    """Return True when *path* is a string naming an existing directory."""
    return is_path_value_valid(path) and os.path.isdir(path)
def is_local_file(path):
    """Return True when *path* is a string naming an existing regular file."""
    return is_path_value_valid(path) and os.path.isfile(path)
def parse_s3_url(url,
                 bucket_name_property="Bucket",
                 object_key_property="Key",
                 version_property=None):
    """Parse an ``s3://bucket/key[?versionId=...]`` URL into a dict.

    Dict keys are configurable through the ``*_property`` parameters.

    :raise ValueError: when *url* is not a valid S3 URL
    """
    if isinstance(url, six.string_types) \
            and url.startswith("s3://"):
        # Python < 2.7.10 don't parse query parameters from URI with custom
        # scheme such as s3://blah/blah. As a workaround, remove scheme
        # altogether to trigger the parser "s3://foo/bar?v=1" =>"//foo/bar?v=1"
        parsed = urlparse(url[3:])
        query = parse_qs(parsed.query)
        if parsed.netloc and parsed.path:
            result = dict()
            result[bucket_name_property] = parsed.netloc
            result[object_key_property] = parsed.path.lstrip('/')
            # If there is a query string that has a single versionId field,
            # set the object version and return
            if version_property is not None \
                    and 'versionId' in query \
                    and len(query['versionId']) == 1:
                result[version_property] = query['versionId'][0]
            return result
    raise ValueError("URL given to the parse method is not a valid S3 url "
                     "{0}".format(url))
def upload_local_artifacts(resource_id, resource_dict, property_name,
                           parent_dir, uploader):
    """
    Upload local artifacts referenced by the property at given resource and
    return S3 URL of the uploaded object. It is the responsibility of callers
    to ensure property value is a valid string

    If path refers to a file, this method will upload the file. If path refers
    to a folder, this method will zip the folder and upload the zip to S3.
    If path is omitted, this method will zip the current working folder and
    upload.

    If path is already a path to S3 object, this method does nothing.

    :param resource_id: Id of the CloudFormation resource
    :param resource_dict: Dictionary containing resource definition
    :param property_name: Property name of CloudFormation resource where this
        local path is present
    :param parent_dir: Resolve all relative paths with respect to this
        directory
    :param uploader: Method to upload files to S3
    :return: S3 URL of the uploaded object
    :raise: ValueError if path is not a S3 URL or a local path
    """
    local_path = resource_dict.get(property_name, None)
    if local_path is None:
        # Build the root directory and upload to S3
        local_path = parent_dir
    if is_s3_url(local_path):
        # A valid CloudFormation template will specify artifacts as S3 URLs.
        # This check is supporting the case where your resource does not
        # refer to local artifacts
        # Nothing to do if property value is an S3 URL
        LOG.debug("Property {0} of {1} is already a S3 URL"
                  .format(property_name, resource_id))
        return local_path
    local_path = make_abs_path(parent_dir, local_path)
    # Or, pointing to a folder. Zip the folder and upload
    if is_local_folder(local_path):
        return zip_and_upload(local_path, uploader)
    # Path could be pointing to a file. Upload the file
    elif is_local_file(local_path):
        return uploader.upload_with_dedup(local_path)
    raise exceptions.InvalidLocalPathError(
        resource_id=resource_id,
        property_name=property_name,
        local_path=local_path)
def zip_and_upload(local_path, uploader):
    """Zip *local_path* folder, upload the archive to S3, return its URL.

    The temporary zip is removed once the upload completes.
    """
    with zip_folder(local_path) as zipfile:
        return uploader.upload_with_dedup(zipfile)
@contextmanager
def zip_folder(folder_path):
    """
    Zip the entire folder and return a file to the zip. Use this inside
    a "with" statement to cleanup the zipfile after it is used.

    :param folder_path:
    :return: Name of the zipfile
    """
    filename = os.path.join(
        tempfile.gettempdir(), "data-" + uuid.uuid4().hex)
    zipfile_name = make_zip(filename, folder_path)
    try:
        yield zipfile_name
    finally:
        # Always remove the temporary archive, even if the caller raised.
        if os.path.exists(zipfile_name):
            os.remove(zipfile_name)
def make_zip(filename, source_root):
    """Zip the contents of *source_root* into ``<filename>.zip``.

    Archive entries are stored with paths relative to *source_root*.

    :param filename: Target zip path without the ".zip" extension.
    :param source_root: Directory whose contents are archived.
    :return: Path of the created zip file.
    """
    zipfile_name = "{0}.zip".format(filename)
    source_root = os.path.abspath(source_root)
    with open(zipfile_name, 'wb') as f:
        zip_file = zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED)
        with contextlib.closing(zip_file) as zf:
            for root, dirs, files in os.walk(source_root):
                # 'name' instead of 'filename': the original loop variable
                # shadowed the function parameter.
                for name in files:
                    full_path = os.path.join(root, name)
                    relative_path = os.path.relpath(
                        full_path, source_root)
                    zf.write(full_path, relative_path)
    return zipfile_name
@contextmanager
def mktempfile():
    """Yield a writable temporary file handle; the file is removed on exit."""
    path = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
    try:
        handle = open(path, "w+")
        try:
            yield handle
        finally:
            handle.close()
    finally:
        if os.path.exists(path):
            os.remove(path)
class Resource(object):
    """
    Base class representing a CloudFormation resource that can be exported
    """
    # Name of the resource property holding the local artifact path.
    PROPERTY_NAME = None
    def __init__(self, uploader):
        self.uploader = uploader
    def export(self, resource_id, resource_dict, parent_dir):
        """Export the resource's artifact, wrapping failures in
        ExportFailedError."""
        if resource_dict is None:
            return
        property_value = resource_dict.get(self.PROPERTY_NAME, None)
        if isinstance(property_value, dict):
            # A dict value means the artifact is already specified as an
            # S3 location structure — nothing to upload.
            LOG.debug("Property {0} of {1} resource is not a URL"
                      .format(self.PROPERTY_NAME, resource_id))
            return
        try:
            self.do_export(resource_id, resource_dict, parent_dir)
        except Exception as ex:
            LOG.debug("Unable to export", exc_info=ex)
            raise exceptions.ExportFailedError(
                resource_id=resource_id,
                property_name=self.PROPERTY_NAME,
                property_value=property_value,
                ex=ex)
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Default export action is to upload artifacts and set the property to
        S3 URL of the uploaded object
        """
        resource_dict[self.PROPERTY_NAME] = \
            upload_local_artifacts(resource_id, resource_dict,
                                   self.PROPERTY_NAME,
                                   parent_dir, self.uploader)
class ResourceWithS3UrlDict(Resource):
    """
    Represents CloudFormation resources that need the S3 URL to be specified as
    an dict like {Bucket: "", Key: "", Version: ""}
    """
    # Subclasses set the dict key names expected by their resource type.
    BUCKET_NAME_PROPERTY = None
    OBJECT_KEY_PROPERTY = None
    VERSION_PROPERTY = None
    def __init__(self, uploader):
        super(ResourceWithS3UrlDict, self).__init__(uploader)
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Upload to S3 and set property to an dict representing the S3 url
        of the uploaded object
        """
        artifact_s3_url = \
            upload_local_artifacts(resource_id, resource_dict,
                                   self.PROPERTY_NAME,
                                   parent_dir, self.uploader)
        # Convert the flat s3:// URL to the dict shape this resource expects.
        resource_dict[self.PROPERTY_NAME] = parse_s3_url(
            artifact_s3_url,
            bucket_name_property=self.BUCKET_NAME_PROPERTY,
            object_key_property=self.OBJECT_KEY_PROPERTY,
            version_property=self.VERSION_PROPERTY)
# Concrete resource types: each maps one CloudFormation resource property
# that may hold a local artifact path onto the appropriate S3 upload shape.
class ServerlessFunctionResource(Resource):
    PROPERTY_NAME = "CodeUri"
class ServerlessApiResource(Resource):
    PROPERTY_NAME = "DefinitionUri"
class LambdaFunctionResource(ResourceWithS3UrlDict):
    PROPERTY_NAME = "Code"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
class ApiGatewayRestApiResource(ResourceWithS3UrlDict):
    PROPERTY_NAME = "BodyS3Location"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
class ElasticBeanstalkApplicationVersion(ResourceWithS3UrlDict):
    PROPERTY_NAME = "SourceBundle"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = None
class CloudFormationStackResource(Resource):
    """
    Represents CloudFormation::Stack resource that can refer to a nested
    stack template via TemplateURL property.
    """
    PROPERTY_NAME = "TemplateURL"
    def __init__(self, uploader):
        super(CloudFormationStackResource, self).__init__(uploader)
    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method will
        export on the nested template, upload the exported template to S3
        and set property to URL of the uploaded S3 template
        """
        template_path = resource_dict.get(self.PROPERTY_NAME, None)
        if template_path is None or is_s3_url(template_path):
            # Nothing to do
            return
        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)
        # Recursively export the nested template's own artifacts first.
        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()
        exported_template_str = yaml_dump(exported_template_dict)
        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()
            url = self.uploader.upload_with_dedup(
                temporary_file.name, "template")
            resource_dict[self.PROPERTY_NAME] = url
# Maps each supported CloudFormation resource type to its exporter class.
EXPORT_DICT = {
    "AWS::Serverless::Function": ServerlessFunctionResource,
    "AWS::Serverless::Api": ServerlessApiResource,
    "AWS::ApiGateway::RestApi": ApiGatewayRestApiResource,
    "AWS::Lambda::Function": LambdaFunctionResource,
    "AWS::ElasticBeanstalk::ApplicationVersion":
        ElasticBeanstalkApplicationVersion,
    "AWS::CloudFormation::Stack": CloudFormationStackResource
}
class Template(object):
    """
    Class to export a CloudFormation template
    """
    def __init__(self, template_path, parent_dir, uploader,
                 resources_to_export=EXPORT_DICT):
        """
        Reads the template and makes it ready for export

        :param template_path: Template file path, possibly relative to
            parent_dir.
        :param parent_dir: Absolute directory used to resolve relative paths.
        :param uploader: S3 uploader used by the resource exporters.
        :param resources_to_export: Mapping of resource type -> exporter.
        """
        if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)):
            raise ValueError("parent_dir parameter must be "
                             "an absolute path to a folder {0}"
                             .format(parent_dir))
        abs_template_path = make_abs_path(parent_dir, template_path)
        template_dir = os.path.dirname(abs_template_path)
        with open(abs_template_path, "r") as handle:
            template_str = handle.read()
        self.template_dict = yaml_parse(template_str)
        self.template_dir = template_dir
        self.resources_to_export = resources_to_export
        self.uploader = uploader
    def export(self):
        """
        Exports the local artifacts referenced by the given template to an
        s3 bucket.

        :return: The template with references to artifacts that have been
        exported to s3.
        """
        if "Resources" not in self.template_dict:
            return self.template_dict
        for resource_id, resource in self.template_dict["Resources"].items():
            resource_type = resource.get("Type", None)
            resource_dict = resource.get("Properties", None)
            if resource_type in self.resources_to_export:
                # Export code resources
                exporter = self.resources_to_export[resource_type](
                    self.uploader)
                exporter.export(resource_id, resource_dict, self.template_dir)
        return self.template_dict
| [
"jagreenf111@gmail.com"
] | jagreenf111@gmail.com |
f98de1eedd4d4c6fa28f8f76d28224574704c59b | 36959b56e506dbbe2d3c381cdccfe16965c14d24 | /Django/social/posts/migrations/0001_initial.py | e8fe75165e17e54f70de6e9f2a5a55528a6969fe | [] | no_license | Sathishkumar-M/Django | e2935fe0c69acb4cb39be2bc0504fd3d5619d002 | e54038ef70295274639b6207efe8e7e3939cbe36 | refs/heads/master | 2020-03-21T20:22:48.684770 | 2018-06-28T10:42:51 | 2018-06-28T10:42:51 | 139,003,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-11-28 15:20
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration: creates the Post model with user/group
# foreign keys and a (user, message) uniqueness constraint. Do not edit
# the operations by hand — generate a new migration instead.
class Migration(migrations.Migration):
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('groups', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now=True)),
                ('message', models.TextField()),
                ('message_html', models.TextField(editable=False)),
                ('group', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='groups.Group')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-created_at'],
            },
        ),
        migrations.AlterUniqueTogether(
            name='post',
            unique_together=set([('user', 'message')]),
        ),
    ]
| [
"sathishkumar.appiness@gmail.com"
] | sathishkumar.appiness@gmail.com |
50d0989520ba6a55b188773a135e56c496e1035a | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2639/60636/252547.py | c0df1380f133df5be225f8d9e3d808ba7ff728ff | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 632 | py | import collections
def characterReplacement(s, k):
    """Return the length of the longest substring of *s* that can be made
    uniform by replacing at most *k* characters (sliding-window technique).

    Fixes over the original: removes debug print() calls that polluted
    stdout and drops the unused 'alls'/'count' bookkeeping that never
    affected the result.

    :param s: input string
    :param k: maximum number of characters that may be replaced
    :return: length of the longest achievable uniform substring
    """
    counts = collections.defaultdict(int)
    left = 0
    max_freq = 0  # highest single-character count seen in any window
    best = 0
    for right, ch in enumerate(s):
        counts[ch] += 1
        max_freq = max(max_freq, counts[ch])
        # Shrink the window until it can be made uniform with <= k edits.
        while right - left + 1 - max_freq > k:
            counts[s[left]] -= 1
            left += 1
        best = max(best, right - left + 1)
    return best
# Read the string and the replacement budget from stdin, print the answer.
s=input()
k=int(input())
print(characterReplacement(s,k))
"1069583789@qq.com"
] | 1069583789@qq.com |
fd92e6b94e9cc2cef862406f623ad9a42b1ba1ff | 6e1407eb15c2003e8b62073b719dc676d8652366 | /aiodjango/routing.py | 00f1f431ec7ae22780028bd9b4478e9764bde86a | [
"BSD-2-Clause"
] | permissive | mlavin/aiodjango | 9f65155ac85de91ba14cb820b9c1d56849ffa05c | 9989f3160453088cd7bfdfde218da143ee9f93ee | refs/heads/master | 2021-07-16T07:56:29.658857 | 2015-12-22T14:54:31 | 2015-12-22T14:54:31 | 48,146,080 | 7 | 6 | BSD-2-Clause | 2021-02-26T02:13:06 | 2015-12-17T01:55:48 | Python | UTF-8 | Python | false | false | 1,570 | py | import asyncio
import inspect
import re
from importlib import import_module
from django.conf import settings
from django.contrib.admindocs.views import extract_views_from_urlpatterns
from django.core.urlresolvers import reverse
from aiohttp.web import DynamicRoute
class DjangoRegexRoute(DynamicRoute):
    """Compatibility shim between routing frameworks.

    Wraps a Django URL regex so aiohttp can match it, and delegates URL
    construction back to Django's ``reverse``.
    """
    def __init__(self, method, handler, name, regex, *, expect_handler=None):
        # aiohttp matches full request paths starting with '/', while Django
        # URL regexes usually omit the leading slash — prepend when missing.
        if not regex.lstrip('^').startswith('/'):
            regex = '^/' + regex.lstrip('^')
        pattern = re.compile(regex)
        super().__init__(method, handler, name, pattern, None, expect_handler=expect_handler)
    def url(self, query=None, **kwargs):
        # Build the URL by named reverse, then append the query string.
        url = reverse(self.name, kwargs=kwargs)
        return self._append_query(url, query)
    def __repr__(self):
        name = "'" + self.name + "' " if self.name is not None else ""
        return ("<DjangoRegexRoute {name}[{method}] -> {handler!r}"
                .format(name=name, method=self.method, handler=self.handler))
def get_aio_routes(patterns=None):
    """Walk the URL patterns to find any coroutine views.

    :param patterns: Django urlpatterns; defaults to ROOT_URLCONF's.
    :return: list of DjangoRegexRoute wrapping coroutine/generator views.
    """
    if patterns is None:
        urlconf = import_module(settings.ROOT_URLCONF)
        patterns = urlconf.urlpatterns
    routes = []
    view_functions = extract_views_from_urlpatterns(patterns)
    for (func, regex, namespace, name) in view_functions:
        # Only async-capable views are exposed through aiohttp routing.
        if asyncio.iscoroutinefunction(func) or inspect.isgeneratorfunction(func):
            routes.append(DjangoRegexRoute('*', func, name, regex))
    return routes
| [
"markdlavin@gmail.com"
] | markdlavin@gmail.com |
c48bc70ecb26b1f86f89404e10f91de9a4982f63 | e585c3a61b830d3c24a8cec8343d262c84c724e7 | /Figaro/page_objects/common/menu/locators.py | a64cf359cb42a79dd64da1b9e84261469740bec3 | [] | no_license | Valupiruiz/AutomationPHP | bb0728b2b6508b017c133a7d560a652033adeaf4 | 9a92634ac9f5b27e46723294f9a4cc83a1f99252 | refs/heads/master | 2023-01-18T17:27:57.819270 | 2020-11-27T15:04:49 | 2020-11-27T15:04:49 | 310,594,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,490 | py | from selenium.webdriver.common.by import By
from page_objects.base_page import Locator
class MenuLocators:
    """Selenium locators for the application's main navigation menu.

    ``SOLAPA_MENU_LBL`` is a template locator: the ``{solapa}`` placeholder is
    presumably formatted with the visible tab text ("solapa" is Spanish for
    tab) before use -- confirm against callers.  It replaces the per-tab
    locators kept commented out below for reference.
    """

    # Generic tab locator -- format the placeholder with the tab's link text.
    SOLAPA_MENU_LBL = Locator(By.LINK_TEXT, "{solapa}")
    # HOME_LBL = Locator(By.XPATH, "//a[contains(text(),'Home')]")
    # MATERIAS_LBL = Locator(By.XPATH, "//a[contains(text(),'Materias')]")
    # NOTICIAS_LBL = Locator(By.XPATH, "//a[contains(text(),'Noticias')]")
    # ADMISIONES_LBL = Locator(By.XPATH, "//a[contains(text(),'Admisiones')]")
    # INFORMES_LBL = Locator(By.XPATH, "//td[contains(@class,'tab')]//a[contains(text(),'Informes')]")
    # PRESENTISMO_LBL = Locator(By.XPATH, "//a[contains(text(),'Presentismo')]")
    # ASISTENCIA_LBL = Locator(By.XPATH, "//a[contains(text(),'Asistencia')]")
    # CONVIVENCIA_LBL = Locator(By.XPATH, "//a[contains(text(),'Convivencia')]")
    # CERTIFICADOS_LBL = Locator(By.XPATH, "//a[contains(text(),'Certificados')]")
    # BOLETINES_LBL = Locator(By.XPATH, "//a[contains(text(),'Boletines')]")
    # GRUPOS_USUARIO_LBL = Locator(By.XPATH, "//a[contains(text(),'Grupos de usuarios')]")
    # SEGUIMIENTO_DOCENTE_LBL = Locator(By.XPATH, "//a[contains(text(),'Seguimiento Docentes')]")
    # ADMINISTRACION_LBL = Locator(By.XPATH, "//a[contains(text(),'Administración')]")
    # PERMISOS_LBL = Locator(By.XPATH, "//a[contains(text(),'Permisos')]")
    # MIS_CURSOS_LBL = Locator(By.XPATH, "//a[contains(text(),'Mis cursos')]")
    # VINCULOS_EXTERNOS_LBL = Locator(By.XPATH, "//a[contains(text(),'Vínculos Externos')]")
| [
"tomasmoreira04@gmail.com"
] | tomasmoreira04@gmail.com |
0027b52908f754c4a3dd274b8b33492640656baa | 9f99485ac5479c1e6169e71d88a33c31ff591f4e | /tests/app/na_celery/test_stats_tasks.py | db6b7dcaaedad6fb138030fad167c6faa5da71c5 | [
"MIT"
] | permissive | NewAcropolis/api | b8c65554ca78ac0e87fbef46f5f2fbecb6d7700a | 34367f55d3c9ee5bf870956ffc90fd23da559b15 | refs/heads/master | 2023-08-31T09:27:02.125549 | 2023-08-26T22:15:10 | 2023-08-26T22:15:10 | 99,582,634 | 1 | 1 | MIT | 2023-08-26T22:15:11 | 2017-08-07T13:46:23 | Python | UTF-8 | Python | false | false | 4,282 | py | import werkzeug
# Compatibility shim: Werkzeug 2.x moved cached_property into werkzeug.utils;
# re-expose it at the old location for libraries that still import
# werkzeug.cached_property directly.
werkzeug.cached_property = werkzeug.utils.cached_property
from flask import current_app
from freezegun import freeze_time
from mock import call
import requests_mock
from app.na_celery.stats_tasks import send_num_subscribers_and_social_stats
from tests.db import create_member
class WhenProcessingSendNumSubscribersTask:
    """Tests for the send_num_subscribers_and_social_stats Celery task.

    Each test freezes time (the GA event labels embed the previous month's
    name), seeds members in the database, mocks the outbound GA call and the
    Facebook/Instagram HTTP endpoints, then asserts the exact sequence of
    send_ga_event calls.
    """

    @freeze_time("2021-01-01T10:00:00")
    def it_sends_num_subscribers_and_social_stats(self, mocker, db, db_session):
        # Two members: one created before December, one during December.
        create_member(created_at='2020-10-10T10:00:00')
        create_member(email='test2@example.com', created_at='2020-12-10T10:00:00')

        mock_send_ga_event = mocker.patch('app.na_celery.stats_tasks.send_ga_event')
        with requests_mock.mock() as r:
            # Facebook follower count is scraped from the page HTML.
            r.get(
                current_app.config.get('FACEBOOK_URL'),
                text='<html><body><div><div>1,000</div><div>Total follows</div></div></body></html>')
            # Instagram follower count comes from its JSON API payload.
            r.get(
                current_app.config.get('INSTAGRAM_URL'),
                text='{"data":{"user":{"edge_followed_by":{"count":1100,"page_info":'
                     '{"has_next_page":false,"end_cursor":null},"edges":[]}}},"status":"ok"}')
            send_num_subscribers_and_social_stats()

        # Stats are reported for December (the month before the frozen date).
        assert mock_send_ga_event.call_args_list == [
            call('Number of subscribers', 'members', 'num_subscribers_december', 2),
            call('Number of new subscribers', 'members', 'num_new_subscribers_december', 1),
            call('Facebook followers count', 'social', 'num_facebook_december', 1000),
            call('Instagram followers count', 'social', 'num_instagram_december', 1100),
        ]

    @freeze_time("2021-01-01T10:00:00")
    def it_doesnt_send_instagram_stats(self, mocker, db, db_session):
        # Blank INSTAGRAM_URL disables the Instagram scrape entirely.
        mocker.patch.dict('app.application.config', {
            'INSTAGRAM_URL': ''
        })
        create_member(created_at='2020-10-10T10:00:00')
        create_member(email='test2@example.com', created_at='2020-12-10T10:00:00')

        mock_send_ga_event = mocker.patch('app.na_celery.stats_tasks.send_ga_event')
        with requests_mock.mock() as r:
            r.get(
                current_app.config.get('FACEBOOK_URL'),
                text='<html><body><div><div>1,000</div><div>Total follows</div></div></body></html>')
            r.get(
                current_app.config.get('INSTAGRAM_URL'),
                text='{"data":{"user":{"edge_followed_by":{"count":1100,"page_info":'
                     '{"has_next_page":false,"end_cursor":null},"edges":[]}}},"status":"ok"}')
            send_num_subscribers_and_social_stats()

        # The Instagram figure is reported as 'url not set' instead of a count.
        assert mock_send_ga_event.call_args_list == [
            call('Number of subscribers', 'members', 'num_subscribers_december', 2),
            call('Number of new subscribers', 'members', 'num_new_subscribers_december', 1),
            call('Facebook followers count', 'social', 'num_facebook_december', 1000),
            call('Instagram followers count', 'social', 'num_instagram_december', 'url not set'),
        ]

    @freeze_time("2020-12-01T10:00:00")
    def it_sends_num_subscribers_and_failed_social_stats(self, mocker, db, db_session):
        create_member(created_at='2020-10-10T10:00:00')

        mock_send_ga_event = mocker.patch('app.na_celery.stats_tasks.send_ga_event')
        with requests_mock.mock() as r:
            # Responses deliberately do NOT match the expected markup/JSON, so
            # both scrapes fail to extract a number.
            r.get(
                current_app.config.get('FACEBOOK_URL'),
                text='<html><body><div><div>1,000</div><div>Total followers</div></div></body></html>')
            r.get(
                current_app.config.get('INSTAGRAM_URL'),
                text='<html><head><meta property="og:description" content="'
                     '1,100 Following, 200 Posts"/></head></html>')
            send_num_subscribers_and_social_stats()

        # Member stats still go out; the social counts are reported as 'failed'.
        assert mock_send_ga_event.call_args_list == [
            call('Number of subscribers', 'members', 'num_subscribers_november', 1),
            call('Number of new subscribers', 'members', 'num_new_subscribers_november', 0),
            call('Facebook followers count', 'social', 'num_facebook_november', 'failed'),
            call('Instagram followers count', 'social', 'num_instagram_november', 'failed'),
        ]
| [
"kenlt.uk@gmail.com"
] | kenlt.uk@gmail.com |
1719e02eb066af7f9a41f3e1065f2b42d5223eb6 | 32c56293475f49c6dd1b0f1334756b5ad8763da9 | /google-cloud-sdk/lib/third_party/oauthlib/oauth1/rfc5849/endpoints/authorization.py | df211a199ad7c422aa7f8c84b0131a689a7228c2 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT",
"BSD-2-Clause"
] | permissive | bopopescu/socialliteapp | b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494 | 85bb264e273568b5a0408f733b403c56373e2508 | refs/heads/master | 2022-11-20T03:01:47.654498 | 2020-02-01T20:29:43 | 2020-02-01T20:29:43 | 282,403,750 | 0 | 0 | MIT | 2020-07-25T08:31:59 | 2020-07-25T08:31:59 | null | UTF-8 | Python | false | false | 6,907 | py | # -*- coding: utf-8 -*-
"""oauthlib.oauth1.rfc5849.endpoints.authorization ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from __future__ import absolute_import, unicode_literals
from oauthlib.common import Request, add_params_to_uri
from .. import errors
from .base import BaseEndpoint
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
class AuthorizationEndpoint(BaseEndpoint):
    """An endpoint responsible for letting authenticated users authorize access
    to their protected resources to a client.

    Typical use would be to have two views, one for displaying the
    authorization form and one to process said form on submission.

    The first view will want to utilize ``get_realms_and_credentials`` to
    fetch requested realms and useful client credentials, such as name and
    description, to be used when creating the authorization form.

    During form processing you can use ``create_authorization_response`` to
    validate the request, create a verifier as well as prepare the final
    redirection URI used to send the user back to the client.

    See :doc:`/oauth1/validator` for details on which validator methods to
    implement for this endpoint.
    """

    def create_verifier(self, request, credentials):
        """Create and save a new request token.

        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :param credentials: A dict of extra token credentials.
        :returns: The verifier as a dict.
        """
        verifier = {
            'oauth_token': request.resource_owner_key,
            'oauth_verifier': self.token_generator(),
        }
        # Extra credentials (e.g. display data) are merged into the verifier
        # and persisted alongside it.
        verifier.update(credentials)
        self.request_validator.save_verifier(request.resource_owner_key, verifier,
                                             request)
        return verifier

    def create_authorization_response(self,
                                      uri,
                                      http_method='GET',
                                      body=None,
                                      headers=None,
                                      realms=None,
                                      credentials=None):
        """Create an authorization response, with a new request token if valid.

        :param uri: The full URI of the token request.
        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
        :param body: The request body as a string.
        :param headers: The request headers as a dict.
        :param credentials: A list of credentials to include in the verifier.
        :returns: A tuple of 3 elements.
                  1. A dict of headers to set on the response.
                  2. The response body as a string.
                  3. The response status code as an integer.

        If the callback URI tied to the current token is "oob", a response
        with a 200 status code will be returned. In this case, it may be
        desirable to modify the response to better display the verifier to
        the client.

        An example of an authorization request::

            >>> from your_validator import your_validator
            >>> from oauthlib.oauth1 import AuthorizationEndpoint
            >>> endpoint = AuthorizationEndpoint(your_validator)
            >>> h, b, s = endpoint.create_authorization_response(
            ...     'https://your.provider/authorize?oauth_token=...',
            ...     credentials={
            ...         'extra': 'argument',
            ...     })
            >>> h
            {'Location': 'https://the.client/callback?oauth_verifier=...&extra=argument'}
            >>> b
            None
            >>> s
            302

        An example of a request with an "oob" callback::

            >>> from your_validator import your_validator
            >>> from oauthlib.oauth1 import AuthorizationEndpoint
            >>> endpoint = AuthorizationEndpoint(your_validator)
            >>> h, b, s = endpoint.create_authorization_response(
            ...     'https://your.provider/authorize?foo=bar',
            ...     credentials={
            ...         'extra': 'argument',
            ...     })
            >>> h
            {'Content-Type': 'application/x-www-form-urlencoded'}
            >>> b
            'oauth_verifier=...&extra=argument'
            >>> s
            200
        """
        request = self._create_request(
            uri, http_method=http_method, body=body, headers=headers)

        # The request token must be present and known to the validator.
        if not request.resource_owner_key:
            raise errors.InvalidRequestError(
                'Missing mandatory parameter oauth_token.')
        if not self.request_validator.verify_request_token(
                request.resource_owner_key, request):
            raise errors.InvalidClientError()

        # Granted realms must be a subset of what the client requested.
        request.realms = realms
        if (request.realms and not self.request_validator.verify_realms(
                request.resource_owner_key, request.realms, request)):
            raise errors.InvalidRequestError(
                description=('User granted access to realms outside of '
                             'what the client may request.'))

        verifier = self.create_verifier(request, credentials or {})
        redirect_uri = self.request_validator.get_redirect_uri(
            request.resource_owner_key, request)
        if redirect_uri == 'oob':
            # Out-of-band clients cannot receive a redirect: return the
            # verifier in a form-encoded body instead.
            response_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
            response_body = urlencode(verifier)
            return response_headers, response_body, 200
        else:
            populated_redirect = add_params_to_uri(redirect_uri, verifier.items())
            return {'Location': populated_redirect}, None, 302

    def get_realms_and_credentials(self,
                                   uri,
                                   http_method='GET',
                                   body=None,
                                   headers=None):
        """Fetch realms and credentials for the presented request token.

        :param uri: The full URI of the token request.
        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
        :param body: The request body as a string.
        :param headers: The request headers as a dict.
        :returns: A tuple of 2 elements.
                  1. A list of request realms.
                  2. A dict of credentials which may be useful in creating the
                     authorization form.
        """
        request = self._create_request(
            uri, http_method=http_method, body=body, headers=headers)

        if not self.request_validator.verify_request_token(
                request.resource_owner_key, request):
            raise errors.InvalidClientError()

        realms = self.request_validator.get_realms(request.resource_owner_key,
                                                   request)
        return realms, {'resource_owner_key': request.resource_owner_key}
| [
"jonathang132298@gmail.com"
] | jonathang132298@gmail.com |
5f50cc960cb9a4c2fba3b5674ec717c3cdccc980 | 670f4ba8ded99b420c3454c6ae35789667880cc8 | /tobiko/tests/functional/shell/sh/test_execute.py | 6a8685be113b2165a32e615e5bb5013e091188ba | [
"Apache-2.0"
] | permissive | FedericoRessi/tobiko | 892db522198ab48380892138459d801c4bd00efa | ce2a8734f8b4203ec38078207297062263c49f6f | refs/heads/master | 2022-07-26T22:52:10.273883 | 2022-07-20T20:04:43 | 2022-07-20T20:04:43 | 145,856,925 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,929 | py | # Copyright (c) 2019 Red Hat, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import os
import typing
import testtools
import tobiko
from tobiko import config
from tobiko.openstack import keystone
from tobiko.openstack import stacks
from tobiko.shell import sh
from tobiko.shell import ssh
# Tobiko global configuration proxy.
CONF = config.CONF

# Expected command-wrapper prefix: commands executed over SSH are sent as-is
# (no wrapper shell), while local commands are wrapped in '/bin/sh -c'.
SSH_EXPECTED_SHELL = None
LOCAL_EXPECTED_SHELL = '/bin/sh -c'
class ExecuteTest(testtools.TestCase):
    """Exercises tobiko.shell.sh.execute() success/failure/timeout paths.

    Subclasses pin `expected_shell` and override `execute()` to force a
    specific execution backend (local or SSH).
    """

    @property
    def expected_shell(self) -> typing.Optional[str]:
        # Without an SSH proxy, commands run locally wrapped in '/bin/sh -c';
        # over SSH no wrapper shell is expected.
        if ssh.ssh_proxy_client() is None:
            return LOCAL_EXPECTED_SHELL
        else:
            return SSH_EXPECTED_SHELL

    def test_succeed(self,
                     command: sh.ShellCommandType = 'true',
                     stdin: typing.Optional[str] = None,
                     stdout: typing.Optional[str] = None,
                     stderr: typing.Optional[str] = None,
                     expect_exit_status: typing.Optional[int] = 0,
                     **kwargs):
        # Parametrized helper, also called directly by the test_succeed_*
        # variants below.  stdin/stdout/stderr hold expected stream contents.
        process = self.execute(command=command,
                               stdin=stdin,
                               stdout=bool(stdout),
                               stderr=bool(stderr),
                               expect_exit_status=expect_exit_status,
                               **kwargs)
        self.assertEqual(self.expected_command(command), process.command)
        if stdin:
            self.assertEqual(stdin, str(process.stdin))
        else:
            self.assertIsNone(process.stdin)
        if stdout:
            self.assertEqual(stdout, str(process.stdout))
        else:
            self.assertIsNone(process.stdout)
        if stderr:
            self.assertEqual(stderr, str(process.stderr))
        else:
            self.assertIsNone(process.stderr)
        if expect_exit_status is not None:
            self.assertEqual(0, process.exit_status)

    def test_succeed_with_command_list(self):
        self.test_succeed(['echo', 'something'],
                          stdout='something\n')

    def test_succeed_reading_from_stdout(self):
        self.test_succeed('echo something',
                          stdout='something\n')

    def test_succeed_reading_from_stderr(self):
        self.test_succeed('echo something >&2',
                          stderr='something\n')

    def test_succeed_writing_to_stdin(self):
        self.test_succeed('cat',
                          stdin='some input\n',
                          stdout='some input\n')

    def test_succeed_with_timeout(self):
        self.test_succeed(timeout=30.)

    def test_succeed_with_no_exit_status(self):
        # expect_exit_status=None disables status checking, so even 'false'
        # (exit code 1) counts as a successful execution.
        self.test_succeed(command='false', expect_exit_status=None)

    def test_succeed_with_current_dir(self):
        # Write a file, then cat it by relative path from its directory.
        temp_file = self.make_temporary()
        self.execute(command=f"echo '{self.id()}' > '{temp_file}'")
        self.test_succeed(command=f"cat './{os.path.basename(temp_file)}'",
                          current_dir=os.path.dirname(temp_file),
                          stdout=f"{self.id()}\n")

    def test_fails(self, command='false', exit_status=None, stdin=None,
                   stdout=None, stderr=None, expect_exit_status=0,
                   **kwargs):
        # Parametrized helper asserting the details carried by
        # ShellCommandFailed.
        ex = self.assertRaises(sh.ShellCommandFailed,
                               self.execute,
                               command=command,
                               expect_exit_status=expect_exit_status,
                               stdin=stdin,
                               stdout=bool(stdout),
                               stderr=bool(stderr),
                               **kwargs)
        self.assertEqual(self.expected_command(command), ex.command)
        if stdin:
            self.assertEqual(stdin, ex.stdin)
        else:
            self.assertIsNone(ex.stdin)
        if stdout:
            self.assertEqual(stdout, ex.stdout)
        else:
            self.assertIsNone(ex.stdout)
        if stderr:
            self.assertEqual(stderr, ex.stderr)
        else:
            self.assertIsNone(ex.stderr)
        if exit_status is not None:
            self.assertEqual(exit_status, ex.exit_status)
        else:
            # No specific status expected: only require it to be non-zero.
            self.assertTrue(ex.exit_status)

    def test_fails_getting_exit_status(self):
        self.test_fails('exit 15', exit_status=15)

    def test_fails_reading_from_stdout(self):
        self.test_fails(command='echo something && false',
                        stdout='something\n')

    def test_fails_reading_from_stderr(self):
        self.test_fails(command='echo something >&2 && false',
                        stderr='something\n')

    def test_fails_writing_to_stdin(self):
        self.test_fails('cat && false',
                        stdin='some input\n',
                        stdout='some input\n')

    def test_fails_with_check_exit_status(self):
        # Expecting exit status 1 from 'true' (which exits 0) must fail.
        self.test_fails(command='true', expect_exit_status=1, exit_status=0)

    def test_timeout_expires(self, command='sleep 10', timeout=5., stdin=None,
                             stdout=None, stderr=None, **kwargs):
        ex = self.assertRaises(sh.ShellTimeoutExpired,
                               self.execute,
                               command=command,
                               timeout=timeout,
                               stdin=stdin,
                               stdout=bool(stdout),
                               stderr=bool(stderr),
                               **kwargs)
        self.assertEqual(self.expected_command(command), ex.command)
        # Streams may only have been partially consumed when the timeout
        # fired, hence startswith instead of strict equality.
        if stdin:
            self.assertTrue(stdin.startswith(ex.stdin))
        else:
            self.assertIsNone(ex.stdin)
        if stdout:
            self.assertTrue(stdout.startswith(ex.stdout))
        else:
            self.assertIsNone(ex.stdout)
        if stderr:
            self.assertTrue(stderr.startswith(ex.stderr))
        else:
            self.assertIsNone(ex.stderr)
        self.assertEqual(timeout, ex.timeout)

    def make_temporary(self,
                       directory=False,
                       add_cleanup=True) -> str:
        """Create a temporary file (or directory) and return its path.

        By default the created path is scheduled for removal on test cleanup.
        """
        command = sh.shell_command('mktemp')
        if directory:
            command += '-d'
        temporary_path = self.execute(command=command).stdout.strip()
        if add_cleanup:
            self.addCleanup(sh.execute, f"rm -fR '{temporary_path}'")
        return temporary_path

    def execute(self, **kwargs):
        # Backend hook overridden by subclasses (local vs SSH).
        return sh.execute(**kwargs)

    def expected_command(self, command):
        # The command tobiko reports includes the wrapper shell, if any.
        command = sh.shell_command(command)
        if self.expected_shell is not None:
            command = sh.shell_command(self.expected_shell) + [str(command)]
        return str(command)
class LocalExecuteTest(ExecuteTest):
    """Runs the ExecuteTest matrix against local shell execution."""

    # Local commands are always wrapped in '/bin/sh -c'.
    expected_shell = LOCAL_EXPECTED_SHELL

    def execute(self, **kwargs):
        return sh.local_execute(**kwargs)
@keystone.skip_unless_has_keystone_credentials()
class SSHExecuteTest(ExecuteTest):
    """Runs the ExecuteTest matrix over SSH against an Ubuntu test server."""

    # SSH execution sends the command without a wrapper shell.
    expected_shell = SSH_EXPECTED_SHELL

    # Heat stack providing the server used as SSH target.
    server_stack = tobiko.required_fixture(
        stacks.UbuntuMinimalServerStackFixture)

    @property
    def ssh_client(self):
        return self.server_stack.ssh_client

    def execute(self, **kwargs):
        return sh.ssh_execute(ssh_client=self.ssh_client, **kwargs)
@keystone.skip_unless_has_keystone_credentials()
class CirrosSSHExecuteTest(SSHExecuteTest):
    """Same SSH test matrix, but against a CirrOS server instead of Ubuntu."""

    server_stack = tobiko.required_fixture(
        stacks.CirrosServerStackFixture)
| [
"fressi@redhat.com"
] | fressi@redhat.com |
2d294b34dda835cf074d3b465100db62976b0c34 | da7fd7d45140a244809b81fa7ede3cbe58b82c8b | /azure-kusto-data/azure/kusto/data/aio/_models.py | fab3892a573bd9cc9ffc289837928129719fce0d | [
"MIT"
] | permissive | Azure/azure-kusto-python | 61ba3a1e78de8b1872dc6cb7d2d3799913b2a154 | 59e263b17716b1499e596d667c1137598b98aac0 | refs/heads/master | 2023-08-31T13:52:27.628079 | 2023-08-31T06:41:06 | 2023-08-31T06:41:06 | 108,257,720 | 176 | 107 | MIT | 2023-09-06T18:14:56 | 2017-10-25T10:55:44 | Python | UTF-8 | Python | false | false | 590 | py | from typing import AsyncIterator
from azure.kusto.data._models import KustoResultRow, BaseStreamingKustoResultTable
class KustoStreamingResultTable(BaseStreamingKustoResultTable):
    """Async Iterator over a Kusto result table."""

    async def __anext__(self) -> KustoResultRow:
        try:
            row = await self.raw_rows.__anext__()
        except StopAsyncIteration:
            # Mark exhaustion before propagating, so callers can check
            # `finished` after iteration ends.
            self.finished = True
            raise
        self.row_count += 1
        # Wrap the raw row together with the column metadata.
        return KustoResultRow(self.columns, row)

    def __aiter__(self) -> AsyncIterator[KustoResultRow]:
        return self
| [
"noreply@github.com"
] | Azure.noreply@github.com |
3747d37e151d236d260c992a23eda35e08cf136e | 6a1070b89cd0342d0b6ca3a0bda86b538ce554d0 | /ibis/impala/tests/test_connection_pool.py | 043459754ce60a9bbc602df3b9224a3abddbb74a | [
"Apache-2.0"
] | permissive | deepfield/ibis | d115013f602378a808b8d2000c27e94d9f3eb115 | 1b4e276a9404ad5e8025918abd28bc04dc9ff1bb | refs/heads/master | 2021-07-10T02:31:59.900362 | 2019-05-21T14:25:31 | 2019-05-21T14:25:31 | 64,854,231 | 0 | 1 | Apache-2.0 | 2021-05-07T17:41:58 | 2016-08-03T14:51:03 | Python | UTF-8 | Python | false | false | 681 | py | import pytest
import ibis
# Skip the whole module unless the optional backend dependencies are
# importable, and mark every test here as requiring an Impala environment.
pytest.importorskip('sqlalchemy')
pytest.importorskip('impala.dbapi')

pytestmark = pytest.mark.impala
def test_connection_pool_size(hdfs, env, test_data_db):
    """A freshly created Impala client keeps exactly one pooled connection."""
    client = ibis.impala.connect(
        port=env.impala_port,
        hdfs_client=hdfs,
        host=env.impala_host,
        database=test_data_db,
    )
    # connect() opens a single initial connection.
    assert len(client.con.connection_pool) == 1
def test_connection_pool_size_after_close(hdfs, env, test_data_db):
    """Closing the client drains its connection pool."""
    client = ibis.impala.connect(
        port=env.impala_port,
        hdfs_client=hdfs,
        host=env.impala_host,
        database=test_data_db,
    )
    client.close()
    # After close() no pooled connections may remain.
    assert not client.con.connection_pool
| [
"cpcloud@gmail.com"
] | cpcloud@gmail.com |
f38f48ad2b4f79421fb37be2f69f95435862c7d9 | 5e350a7f147a52a72ca3a859a1df5b3acd710952 | /service/migrations/0008_appointment_offset.py | 2c7f26a1ec1b300e952c114f0d09ae31c015b285 | [
"MIT"
] | permissive | jwoolnt/Opal-Health | 1330389f8698f85ead76e22aff094e5e1b482b90 | ebe0e4c88157ddbe8e56afcc43e4097ca8c4bd49 | refs/heads/master | 2023-07-05T22:07:28.119362 | 2021-03-17T13:27:33 | 2021-03-17T13:27:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | # Generated by Django 3.1.3 on 2020-11-12 19:24
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add `Appointment.offset`, a UTC-offset string column.

    Existing rows are backfilled with '+00:00'; the field stores values like
    '+05:30' (at most 6 characters).
    """

    dependencies = [
        ('service', '0007_auto_20201112_1800'),
    ]

    operations = [
        migrations.AddField(
            model_name='appointment',
            name='offset',
            field=models.CharField(default='+00:00', max_length=6),
        ),
    ]
| [
"you@example.com"
] | you@example.com |
29d409c9d7bf4680ad1ee7ea08d6f17b97a1e0dc | b0814b43440a36c9998924c9fe05f335302a2717 | /venv/lib/python2.7/site-packages/traits/tests/test_special_event_handlers.py | ce1a437d5eed37021e10887ac40ed65986d7eed6 | [
"MIT"
] | permissive | nagyistge/electrode-gui | 0b47324ce8c61ffb54c24c400aee85f16fd79c7a | 6d89c78ea61935042ead5df5e1474101df3557eb | refs/heads/master | 2021-06-03T22:47:30.329355 | 2016-09-13T19:43:31 | 2016-09-13T19:43:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 901 | py | from traits.testing.unittest_tools import unittest
from traits.api import Any, HasStrictTraits, Str
class TestSpecialEvent(unittest.TestCase):
    """ Test demonstrating special change events using the 'event' metadata.
    """

    def setUp(self):
        # Foo reports change notifications back into this list (see Foo).
        self.change_events = []
        self.foo = Foo(test=self)

    def test_events(self):
        """Assigning `val` fires a change event named by its 'event' metadata."""
        self.foo.val = 'CHANGE'
        values = ['CHANGE']
        # Modernized: failUnlessEqual is a long-deprecated unittest alias
        # that was removed in Python 3.12; assertEqual is the supported API.
        self.assertEqual(self.change_events, values)

    def test_instance_events(self):
        """A trait added at runtime with the same 'event' metadata also fires."""
        foo = self.foo
        foo.add_trait('val2', Str(event='the_trait'))
        foo.val2 = 'CHANGE2'
        values = ['CHANGE2']
        self.assertEqual(self.change_events, values)
class Foo(HasStrictTraits):
    """Fixture traits object whose `val` changes notify under 'the_trait'."""

    # The `event` metadata redirects change notifications for `val` to the
    # name 'the_trait', so _the_trait_changed() is the handler invoked.
    val = Str(event='the_trait')
    # Back-reference to the test case collecting observed change events.
    test = Any(None)

    def _the_trait_changed(self, new):
        # Static notification handler for the 'the_trait' event name.
        if self.test is not None:
            self.test.change_events.append(new)
| [
"xavierislam@gmail.com"
] | xavierislam@gmail.com |
eaab8d8267b81b5a50c8a49ee7586f248350f51c | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /other_thing_and_part/last_week.py | 48024b96c47690f8612c0e49e007095cc1d25708 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py |
#! /usr/bin/env python
def last_hand(str_arg):
    """Echo *str_arg* through life_and_company, then print a fixed marker."""
    message = str_arg
    life_and_company(message)
    trailer = 'go_little_year_above_big_hand'
    print(trailer)
def life_and_company(str_arg):
    """Write *str_arg* to standard output, followed by a newline."""
    text = str_arg
    print(text)
# Script entry point: demo run with a fixed argument.
if __name__ == '__main__':
    last_hand('world')
| [
"jingkaitang@gmail.com"
] | jingkaitang@gmail.com |
4c86f506f6232048e2aabfdd346c4a31df66ce7d | ddf7d8f996a0cf66b0e083e0557305b3be4619e5 | /myJunkYard/myTutorials/Python/ciscoPythonClass/day01/print.py | a43916e1aba4c253e31c25065882c796758be863 | [] | no_license | archerImagine/myNewJunk | 8fab3e6ada03eee3aebb5c712d50bcfb38bf48b0 | 42fff352f6057f84ab8c81f1debc149881c1e49f | refs/heads/master | 2020-06-16T12:22:30.590672 | 2016-11-29T17:07:23 | 2016-11-29T17:07:23 | 75,103,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | #!/usr/bin/env python
from sys import stderr
# Python 2 print statement: comma-separated values are joined with spaces.
print "Hello World",10,20,"AnotherString"
print "This is second Line"
# Python-2-only chevron syntax: send this line to stderr instead of stdout.
print >>stderr, "This is error"
# Running `python print.py > out` captures stdout in the file while the
# stderr line above still appears on the terminal.
"animeshbhadra@Animeshs-MacBook-Pro.local"
] | animeshbhadra@Animeshs-MacBook-Pro.local |
bf03021e93bee7e9aa1a4c276f8d7c6ac90b041f | 72a3f41a94202d6d378c222c5cfa9e68155109bb | /selfika/trunk/widgets/set_media.py | 58b393e841d30d9fe4f6aca49b42758442b5c28c | [] | no_license | vakhov/python-django-projects | c312b8bcd94aa448a2678c156ff4936e4a68f668 | 6f296aa75d7692eb5dcb68ef4ce20cadee9dc9e6 | refs/heads/master | 2021-01-17T12:12:56.730072 | 2012-07-25T16:40:45 | 2012-07-25T16:40:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | # -*- coding: utf-8 -*-
from django.db import models
from models import Widget
from grouping import create_group, add2group
# Register the widget group 'media' (display name is Russian for "Multimedia").
create_group('media', 'Мультимедиа')
# Registered in the 'media' group under the Russian label "Изображение" (image).
@add2group('Изображение', 'media')
class SimpleImage(Widget):
    """Image widget: a picture with optional alt/title text and a link."""

    # Verbose name "Изображение" = "Image".
    image = models.ImageField("Изображение", upload_to='media_widgets/image')
    alt = models.CharField(max_length=255, blank=True, default="")
    title = models.CharField(max_length=255, blank=True, default="")
    # Verbose name "Ссылка (необязательно)" = "Link (optional)".
    link = models.CharField("Ссылка (необязательно)", max_length=255, blank=True, default="")
#@add2group('Видео', 'media')
#class SimpleVideo(Widget):
# video = models.FileField("Видео", upload_to='media_widgets/video')
# alt = models.CharField(max_length=255, blank=True, default="")
# title = models.CharField(max_length=255, blank=True, default="")
| [
"succubi@succubi-Inspiron-1501.(none)"
] | succubi@succubi-Inspiron-1501.(none) |
407cb591686cc57f8fe85bcbe48db25a1b164985 | 90a2d0bed5d9eeb6b56c7ac96cc5fbee79dc4c5e | /.history/adding_20210405134642.py | b024a9f73366483777d1107dfb968b7b69e78d48 | [] | no_license | KustomApe/dev | 2d495e22363707b15a22860a773dac6c463903ee | a936f5c3b0928eaa2efaf28c6be8cacc17c3ecb3 | refs/heads/master | 2023-04-28T11:20:03.056953 | 2023-04-07T17:43:40 | 2023-04-07T17:43:40 | 138,429,111 | 1 | 0 | null | 2023-04-25T19:26:09 | 2018-06-23T19:47:23 | Python | UTF-8 | Python | false | false | 499 | py | n = 1
t = 0
# Exercise 1: sum 1..5 using a loop (kept for reference):
# while True:
#     if n == 6:
#         print(t)
#         break
#     else:
#         t = t + n
#         n = n + 1

# Exercise 2: sum 1..10 using a loop (kept for reference):
# while True:
#     if n >= 11:
#         print(t)
#         break
#     else:
#         t = t + n
#         n = n + 1

# Exercise 3: sum the odd numbers from 1 to 10 (1 + 3 + 5 + 7 + 9 == 25).
# Bug fix: the original draft looped forever (`while True` whose counter
# never advanced and which had no break); rewritten to terminate and to
# actually accumulate the odd values into `t`.
n = 1
t = 0
while n <= 10:
    if n % 2 == 1:
        t = t + n
    n = n + 1
print(t)
"kustomape@gmail.com"
] | kustomape@gmail.com |
fa4d6cbfa01ca1eaa307be9dd1d48a72df5366c4 | f416ab3adfb5c641dc84022f918df43985c19a09 | /problems/kattis/squarepeg/sol.py | fe44506a9020cf78270930365d6833c556f2478b | [] | no_license | NicoKNL/coding-problems | a4656e8423e8c7f54be1b9015a9502864f0b13a5 | 4c8c8d5da3cdf74aefcfad4e82066c4a4beb8c06 | refs/heads/master | 2023-07-26T02:00:35.834440 | 2023-07-11T22:47:13 | 2023-07-11T22:47:13 | 160,269,601 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | from math import sqrt
if __name__ == "__main__":
L, R = map(int, input().split())
diagonal = sqrt(L * L + L * L)
if diagonal <= 2 * R:
print("fits")
else:
print("nope")
| [
"klaassen.nico@gmail.com"
] | klaassen.nico@gmail.com |
dd3b537c41645fafe6a81fe7c885514876745109 | eddd71e0e73a080d2a4917a72d1a2c19776f7170 | /apps/blog/views.py | fb4f3e6cab1e56f9b2604d11db1081c34ca4f540 | [] | no_license | fshgrym/django-web | 17fe143f9c81bde57caea4b38248faf0e64b6489 | 0bb91b37bac9556f8b1fbcae2fa5a017a2d0c98d | refs/heads/master | 2020-03-09T12:25:38.046036 | 2018-04-10T17:20:18 | 2018-04-10T17:20:18 | 128,785,867 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,176 | py | # -*- coding:utf-8 -*-
import json
from django.shortcuts import render,HttpResponse,HttpResponseRedirect,render_to_response
from django.views.generic import View
from django.shortcuts import get_object_or_404
from django.core.urlresolvers import reverse
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout #登入和登出
from django.contrib.auth.decorators import login_required # 验证用户是否登录
from django.db.models import Q
# from django.utils import reverse
from pure_pagination import Paginator, EmptyPage, PageNotAnInteger
from .models import Article,Banner,Tag,Comment,Links,Category
from .form import CommentForm,RegisterForm,LoginForm
import markdown
# Create your views here.
class TagView(View):
    """Lists articles carrying a given tag, newest first, paginated by 5."""

    def get(self, request, id):
        links = Links.objects.all()
        tag = get_object_or_404(Tag, id=id)
        all_article = Article.objects.filter(tags=tag).order_by('-create_time')

        # Collect the distinct articles that have at least one comment;
        # the template uses this as the "recent comments" sidebar data.
        comments_set = set()
        all_comment = Comment.objects.all()
        for i in all_comment:
            comments = i.article
            comments_set.add(comments)
        comments_list = list(comments_set)

        # NOTE(review): request.GET.get() cannot raise PageNotAnInteger; the
        # except branch is dead -- the same idiom repeats across these views.
        try:
            page = request.GET.get('page', 1)
        except PageNotAnInteger:
            page = 1
        p = Paginator(all_article, 5, request=request)
        all_article = p.page(page)

        return render(request, 'list.html', {'tag': tag,
                                             'all_article': all_article,
                                             'comments': comments_list,
                                             'links': links
                                             })
class ArchivesView(View):
    '''Archive page ("归档"): renders archives.html with sidebar data.'''

    def get(self, request):
        links = Links.objects.all()

        # Distinct articles that have comments, for the sidebar.
        comments_set = set()
        all_comment = Comment.objects.all()
        for i in all_comment:
            comments = i.article
            comments_set.add(comments)
        comments_list = list(comments_set)

        return render(request, 'archives.html', {'comments': comments_list,
                                                 'links': links})
# class ArchivesView(View):
# '''归档'''
# def get(self,request,year,month):
# links = Links.objects.all()
#
# comments_set = set()
# all_comment = Comment.objects.all()
# for i in all_comment:
# comments = i.article
# comments_set.add(comments)
# comments_list = list(comments_set)
#
# post_list = Article.objects.filter(create_time__year=year, create_time__month=month).order_by('-create_time')
# try:
# page = request.GET.get('page', 1)
# except PageNotAnInteger:
# page = 1
# p = Paginator(post_list,5, request=request)
# all_article = p.page(page)
#
# return render(request, 'list.html',{
# 'all_article':all_article,
# 'comments':comments_list,
# 'links':links
# })
class CategoryView(View):
    """Lists articles of one category, newest first, paginated by 5.

    Mirrors TagView almost line for line, just filtering by category.
    """

    def get(self, request, id):
        links = Links.objects.all()
        cate = get_object_or_404(Category, id=id)
        all_article = Article.objects.filter(category=cate).order_by('-create_time')

        # Distinct articles that have comments, for the sidebar.
        comments_set = set()
        all_comment = Comment.objects.all()
        for i in all_comment:
            comments = i.article
            comments_set.add(comments)
        comments_list = list(comments_set)

        # NOTE(review): the except branch is dead -- request.GET.get() never
        # raises PageNotAnInteger.
        try:
            page = request.GET.get('page', 1)
        except PageNotAnInteger:
            page = 1
        p = Paginator(all_article, 5, request=request)
        all_article = p.page(page)

        return render(request, 'list.html', {
            'all_article': all_article,
            'comments': comments_list,
            'links': links
        })
from django import forms
class SearchForm(forms.Form):
    """Single-field search form; the query is capped at 20 characters."""

    keyword = forms.CharField(max_length=20)
class SearchView(View):
    """Full-text search over article titles and bodies (?keyword=...)."""

    def get(self, request):
        # Bug fix: the form used to be instantiated unbound (SearchForm())
        # and then validated, so is_valid() could never return True and the
        # branch reading cleaned_data was dead.  Bind it to the query string
        # so the template can re-render the submitted value.
        search_form = SearchForm(request.GET)

        links = Links.objects.all()
        keyword = request.GET.get('keyword', '')
        # Case-insensitive match against either the title or the body.
        all_article = Article.objects.filter(
            Q(title__icontains=keyword) | Q(body__icontains=keyword))

        # Distinct articles that have comments, for the sidebar.
        comments_set = set()
        for i in Comment.objects.all():
            comments_set.add(i.article)
        comments_list = list(comments_set)

        # Pagination.  Bug fix: the try/except used to wrap request.GET.get(),
        # which can never raise PageNotAnInteger; it now guards p.page(), the
        # call that actually raises, falling back to the first page.
        page = request.GET.get('page', 1)
        p = Paginator(all_article, 5, request=request)
        try:
            all_article = p.page(page)
        except (PageNotAnInteger, EmptyPage):
            all_article = p.page(1)

        return render(request, 'list.html', {
            'keyword': keyword,
            'search_form': search_form,
            'all_article': all_article,
            'comments': comments_list,
            'links': links
        })
class IndexView(View):
    """Home page: banners, recommended article, latest comments, article list."""

    def get(self, request):
        links = Links.objects.all()
        # Categories shown as navigation tabs.
        is_tab = Category.objects.filter(is_tab=True)
        # Top five banners, highest index first.
        all_banner = Banner.objects.all().order_by('-index')[:5]
        all_article = Article.objects.all().order_by('-create_time')

        '''推荐'''
        # Single most-viewed article among those flagged as recommended.
        recom_article = Article.objects.filter(recom=True).order_by('-view')[:1]

        '''最新评论'''
        # Articles referenced by the five most recent comments.
        comments_set = set()
        all_comment = Comment.objects.all()[:5]
        for i in all_comment:
            comments = i.article
            comments_set.add(comments)
        comments_list = list(comments_set)

        # NOTE(review): dead except branch -- request.GET.get() never raises
        # PageNotAnInteger (same idiom as the other list views).
        try:
            page = request.GET.get('page', 1)
        except PageNotAnInteger:
            page = 1
        p = Paginator(all_article, 5, request=request)
        all_article = p.page(page)

        # Articles flagged for home-page "related" promotion.
        related_article = Article.objects.filter(is_related=True)
        # TODO: fetch the category with the most articles.
        # top_article = Article.objects.all(Category__name=)['']

        return render(request, 'index.html', {
            'all_article': all_article,
            'all_banner': all_banner,
            'recom_article': recom_article,
            'comments': comments_list,
            'links': links,
            'related_article': related_article,
            'is_tab': is_tab
        })
# class ListView(View):
# def get(self, request):
# return render(request, 'list.html')
class ShowView(View):
def get(self,request,id):
links = Links.objects.all()
post = get_object_or_404(Article,id=id)
# post.body = markdown.markdown(post.body,
# extensions=[
# 'markdown.extensions.extra',
# 'markdown.extensions.codehilite',
# 'markdown.extensions.toc',
# ])
#评论
comments_set = set()
all_comment = Comment.objects.all()[:5]
for i in all_comment:
comments = i.article
comments_set.add(comments)
comments_list = list(comments_set)
#相关推荐
cat = post.category
related_article = Article.objects.filter(category=cat)[:8]
comment = post.comment_set.all()
post.view +=1
post.save()
return render(request, 'show.html',{'post':post,'comments_list':comment,'comments':comments_list,'links':links,'related_article':related_article})
def post(self,request,id):
post = get_object_or_404(Article,id=id)
form = CommentForm(request.POST)
if form.is_valid():
comment = form.save(commit=False) #commit=False 的作用是仅仅利用表单的数据生成 Comment 模型类的实例,但还不保存评论数据到数据库。
comment.article = post
comment.save()
return HttpResponse(
'{"status":"success"}',
content_type='application/json')
else:
return HttpResponse('{"status":"fail","msg":"评论失败"}', content_type="application/json")
# class CommentView(View):
#
def page_not_fount(request):
response = render_to_response('404.html')
response.status_code = 404
return response
def page_error(request):
response = render_to_response('500.html')
response.status_code = 500
return response
class LoginView(View):
def get(self,request):
login_form = LoginForm()
return render(request,'login.html',{'login_form':login_form})
def post(self,request):
login_form = LoginForm(request.POST)
if login_form.is_valid():
username = request.POST.get('username','')
password = request.POST.get('password','')
user = authenticate(username=username,password=password)
if user is not None:
if user.is_active:
login(request,user)
return HttpResponseRedirect(reverse('index'))
else:
msg = '该用户未激活'
return render(request,'login.html',{'msg':msg})
else:
msg = '用户名密码错误'
return render(request, 'login.html', {'msg': msg,'login_form':login_form})
else:
msg = '用户名密码错误'
return render(request, 'login.html', {'msg': msg,'login_form':login_form})
class RegisterView(View):
def get(self, request):
register_form = RegisterForm()
return render(request, 'register.html',{'register_form':register_form})
def post(self,request):
register_form = RegisterForm(request.POST)
if register_form.is_valid():
username = register_form.cleaned_data.get('username','')
email = register_form.cleaned_data.get('email','')
password = register_form.cleaned_data.get('password')
password1 = register_form.cleaned_data.get('password1')
if User.objects.filter(Q(email=email)|Q(username=username)):
return render(request, 'register.html', {'register_form': register_form,'email_msg':'邮箱用户名已经存在'})
elif password!=password1:
return render(request, 'register.html', {'register_form': register_form, 'pwd_msg': '密码不一致'})
user = User()
user.username = username
user.email = email
user.password = make_password(password)
user.save()
return render(request,'login.html')
else:
return render(request, 'register.html', {'register_form': register_form, 'msg': '注册失败'})
class LogoutView(View):
def get(self,request):
logout(request)
return HttpResponseRedirect(reverse('index'))
| [
"fshgrym"
] | fshgrym |
8937a9f52041c3813031738685edc0154b898fff | 02e930875e95713a387c60b7de816a3a354ceb3d | /bids/variables/tests/test_variables.py | c45eae2e84128fd3b477b5ef3604f43a45ffa328 | [
"MIT"
] | permissive | Islast/pybids | 6a7899cbef03ec3579a21e1cbe85deb5d180ca1e | 3e80e617e6bd6258e027c9937ccedafe1c8e6b14 | refs/heads/master | 2020-03-23T22:00:14.970425 | 2018-07-24T14:14:08 | 2018-07-24T14:14:08 | 142,146,832 | 0 | 0 | MIT | 2018-07-24T11:06:51 | 2018-07-24T11:06:51 | null | UTF-8 | Python | false | false | 6,023 | py | from bids.grabbids import BIDSLayout
import pytest
from os.path import join
from bids.tests import get_test_data_path
from bids.variables import (merge_variables, DenseRunVariable, SimpleVariable,
load_variables)
from bids.variables.entities import RunInfo
import numpy as np
import pandas as pd
import uuid
def generate_DEV(name='test', sr=20, duration=480):
n = duration * sr
values = np.random.normal(size=n)
ent_names = ['task', 'run', 'session', 'subject']
entities = {e: uuid.uuid4().hex for e in ent_names}
image = uuid.uuid4().hex + '.nii.gz'
run_info = RunInfo(entities, duration, 2, image)
return DenseRunVariable('test', values, run_info, 'dummy', sr)
@pytest.fixture
def layout1():
path = join(get_test_data_path(), 'ds005')
layout = BIDSLayout(path, exclude='derivatives/')
return layout
@pytest.fixture(scope="module")
def layout2():
path = join(get_test_data_path(), '7t_trt')
layout = BIDSLayout(path)
return layout
def test_dense_event_variable_init():
dev = generate_DEV()
assert dev.sampling_rate == 20
assert dev.run_info[0].duration == 480
assert dev.source == 'dummy'
assert len(dev.values) == len(dev.index)
def test_dense_event_variable_resample():
dev = generate_DEV()
dev2 = dev.clone().resample(sampling_rate=40)
assert len(dev2.values) == len(dev2.index)
assert len(dev2.values) == 2 * len(dev.values)
def test_merge_wrapper():
dev = generate_DEV()
data = pd.DataFrame({'amplitude': [4, 3, 2, 5]})
sev = SimpleVariable('simple', data, 'dummy')
# Should break if asked to merge different classes
with pytest.raises(ValueError) as e:
merge_variables([dev, sev])
assert "Variables of different classes" in str(e)
def test_sparse_run_variable_to_dense(layout1):
index = load_variables(layout1, types='events', scan_length=480)
runs = index.get_nodes('run', {'subject': ['01', '02']})
for i, run in enumerate(runs):
var = run.variables['RT']
dense = var.to_dense(20)
# Check that all unique values are identical
sparse_vals = set(np.unique(var.values.values)) | {0}
dense_vals = set(np.unique(dense.values.values))
assert sparse_vals == dense_vals
assert len(dense.values) > len(var.values)
assert isinstance(dense, DenseRunVariable)
assert dense.values.shape == (9600, 1)
assert len(dense.run_info) == len(var.run_info)
assert dense.source == 'events'
def test_merge_densified_variables(layout1):
SR = 10
dataset = load_variables(layout1, types='events', scan_length=480)
runs = dataset.get_nodes('run')
vars_ = [r.variables['RT'].to_dense(SR) for r in runs]
dense = merge_variables(vars_)
assert isinstance(dense, DenseRunVariable)
n_rows = 480 * SR
assert dense.values.shape == (len(runs) * n_rows, 1)
for i in range(len(runs)):
onset = i * n_rows
offset = onset + n_rows
run_vals = vars_[i].values
dense_vals = dense.values.iloc[onset:offset].reset_index(drop=True)
assert dense_vals.equals(run_vals)
def test_densify_merged_variables(layout1):
SR = 10
dataset = load_variables(layout1, types='events', scan_length=480)
runs = dataset.get_nodes('run')
vars_ = [r.variables['RT'] for r in runs]
var = merge_variables(vars_)
dense = var.to_dense(SR)
assert isinstance(dense, DenseRunVariable)
n_rows = 480 * SR
assert dense.values.shape == (len(runs) * n_rows, 1)
for i in range(len(runs)):
onset = i * n_rows
offset = onset + n_rows
run_vals = vars_[i].to_dense(SR).values
dense_vals = dense.values.iloc[onset:offset].reset_index(drop=True)
assert dense_vals.equals(run_vals)
def test_merge_simple_variables(layout2):
index = load_variables(layout2, types='sessions')
subjects = index.get_nodes('subject')
variables = [s.variables['panas_sad'] for s in subjects]
n_rows = sum([len(c.values) for c in variables])
merged = merge_variables(variables)
assert len(merged.values) == n_rows
assert set(merged.index.columns) == set(variables[0].index.columns)
assert variables[3].values.iloc[1] == merged.values.iloc[7]
def test_merge_sparse_run_variables(layout1):
dataset = load_variables(layout1, types='events', scan_length=480)
runs = dataset.get_nodes('run')
variables = [r.variables['RT'] for r in runs]
n_rows = sum([len(c.values) for c in variables])
merged = merge_variables(variables)
assert len(merged.values) == n_rows
assert set(merged.index.columns) == set(variables[0].index.columns)
def test_merge_dense_run_variables(layout2):
variables = [generate_DEV() for i in range(20)]
variables += [generate_DEV(duration=400) for i in range(8)]
n_rows = sum([len(c.values) for c in variables])
merged = merge_variables(variables)
assert len(merged.values) == n_rows
assert set(merged.index.columns) == set(variables[0].index.columns)
def test_simple_variable_to_df(layout1):
pass
def test_sparse_run_variable_to_df(layout1):
pass
def test_dense_run_variable_to_df(layout2):
pass
def test_filter_simple_variable(layout2):
dataset = load_variables(layout2, types=['scans'])
sessions = dataset.get_nodes('session')
variables = [s.variables['surroundings'] for s in sessions]
merged = merge_variables(variables)
assert merged.to_df().shape == (60, 9)
filt = merged.filter({'acq': 'fullbrain'})
assert filt.to_df().shape == (40, 9)
flt1 = merged.filter({'acq': 'fullbrain', 'subject': ['01', '02']}).to_df()
assert flt1.shape == (8, 9)
flt2 = merged.filter(query='acq=="fullbrain" and subject in ["01", "02"]')
flt2 = flt2.to_df()
assert flt1.equals(flt2)
assert merged.filter({'nonexistent': 2}, strict=True) is None
merged.filter({'acq': 'fullbrain'}, inplace=True)
assert merged.to_df().shape == (40, 9)
| [
"tyarkoni@gmail.com"
] | tyarkoni@gmail.com |
cca239ea5ee98bec2228ecbcfbeeadf91379410c | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_158/ch21_2020_03_29_16_38_41_677181.py | 7eda3cae717ddc93581b1305c19d213a48d64626 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | d=input("dias")
h=input("horas")
m=input("minutos")
s=input("segundos")
t=d*86400+h*3600+m*60+s
print(round(t,2) | [
"you@example.com"
] | you@example.com |
4a35e5d1186ac7712235416eb1cf34eb9202f1a6 | d274e22b1cc5d546855fe46b089b13cfe2f4047c | /may2020/solutions/day03_RansomNote.py | 5957935a487e8bfd84bc16b19821e49b9a0ab508 | [] | no_license | varunkumar032/lockdown-leetcode | ca6b7a8133033110680dd226c897dd8a1482682b | 15a72a53be9005eca816f018cb1b244f2aa4cdfb | refs/heads/master | 2023-06-30T08:31:54.323747 | 2021-07-12T11:29:59 | 2021-07-12T11:29:59 | 260,616,280 | 0 | 0 | null | 2021-05-06T10:24:48 | 2020-05-02T04:52:37 | Python | UTF-8 | Python | false | false | 910 | py | # Given an arbitrary ransom note string and another string containing letters from all the magazines, write a function that will return true if the ransom note can be constructed from the magazines ; otherwise, it will return false.
# Each letter in the magazine string can only be used once in your ransom note.
# Note:
# You may assume that both strings contain only lowercase letters.
def canConstruct(ransomNote, magazine):
ransomNoteSet = set(ransomNote)
for item in ransomNoteSet:
if ransomNote.count(item)>magazine.count(item):
return False
return True
# Alternate solution:
# from collections import Counter
# def canConstruct(ransomNote, magazine):
# ransomNoteCount = Counter(ransomNote)
# magazineCount = Counter(magazine)
# for key in ransomNoteCount:
# if ransomNoteCount[key]>magazineCount[key]:
# return False
# return True
| [
"varunkumar032@gmail.com"
] | varunkumar032@gmail.com |
c8be73cd95d1fdc43b864eee4142eea6870b1356 | f7055e71c763d79abcef7283617df6d811375346 | /parlaskupine/utils_.py | 89134af1aa8bc921593cf9764abd7380d3de77b0 | [
"Unlicense"
] | permissive | VesterDe/parlalize | 8ec5aec20f3d2ae72e5485130031ba868554cdab | b725fe4b55b95f2ad3505aa70dac2474269ea3da | refs/heads/master | 2020-09-09T23:04:20.450691 | 2017-06-15T14:26:42 | 2017-06-15T14:26:42 | 94,447,286 | 1 | 0 | null | 2017-06-15T14:25:16 | 2017-06-15T14:25:16 | null | UTF-8 | Python | false | false | 611 | py | from operator import itemgetter
from .models import Organization
from datetime import datetime
from parlalize.settings import API_DATE_FORMAT
from django.http import JsonResponse
def getPgDataAPI(request, id_parladata, date_=None):
if not date_:
date_ = datetime.now().strftime(API_DATE_FORMAT)
org = Organization.objects.filter(id_parladata=id_parladata)
if org:
return JsonResponse(org[0].getOrganizationData())
else:
return JsonResponse({
'id': id_parladata,
'name': "unknown",
'acronym': "unknown",
})
| [
"tomazkunst@gmail.com"
] | tomazkunst@gmail.com |
b7ff2110a21f26b2ddd5139a08bce1432eaf7f5f | cfd9fa1af735ac3572954704a47e35543850b244 | /sanguo/guyu.py | d512591fcef6186a0022496d0cbfa89f6ab3ccc2 | [] | no_license | xingyueGK/hjsg | c1844ea8161d254f6d6cf70f42d1ac849e117438 | be0c4c457bdfaa9178f25f9f722dc78d88f24540 | refs/heads/master | 2022-12-12T08:28:55.823357 | 2020-12-05T12:02:06 | 2020-12-05T12:02:06 | 147,184,573 | 0 | 1 | null | 2022-01-06T22:26:48 | 2018-09-03T09:47:04 | HTML | UTF-8 | Python | false | false | 19,543 | py | # -*- coding:utf-8 -*-
import requests
import threading
try:
import cookielib
except:
import http.cookiejar as cookielib
import sys
import re
import time
import json
reload(sys)
sys.setdefaultencoding('utf-8')
headers = {
'Accept-Encoding':'gzip, deflate',
'Accept-Language':'zh-CN,zh;q=0.8',
'Connection':'keep-alive',
'Upgrade-Insecure-Requests':'1',
'Content-Type':'application/json',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36'
}
def get_html():
#获取首页html
url = 'http://uc.game.hanjiangsanguo.com/index.php?&c=user&m=login&&token=&channel=150&lang=zh-cn&rand=150959328607564&u=xingyue123&p=413728161'
r = requests.session()
return r.get(url).text
def get_token():
url = 'http://s21.game.hanjiangsanguo.com/index.php?v=0&c=login&&m=user&&token=&channel=150&lang=zh-cn&rand=150959405499450&u=xingyue123&p=413728161'
token = requests.session().get(url).text
tokens = json.loads(token)
return tokens['token']
def get_info():
token = get_token()
rand = int(time.time()*1000)
url='http://s21.game.hanjiangsanguo.com/index.php?v=0&c=member&&m=index&&token=%s&channel=150&lang=zh-cn&rand=%d'%(token,rand)
info = requests.post(url,headers=headers).text
return json.loads(info)
class SaoDangFb(object):
def __init__(self):
#随机请求参数
self.rand = str(int(time.time()*1000))
self.token_uid = '210000353508'
self.token = get_token()
#POST基础URL地址
self.url = 'http://s21.game.hanjiangsanguo.com/index.php?v=0&channel=150&lang=zh-cn&token=%s&token_uid=%s&rand=%s&'%(self.token,self.token_uid,self.rand)
def post_url(self,data):
#url拼接数据
self.url = 'http://s21.game.hanjiangsanguo.com/index.php?v=0&channel=150&lang=zh-cn&token=%s&token_uid=%s&rand=%s&' % (self.token, self.token_uid, self.rand)
for k,v in data.items():
self.url += '&%s=%s'%(k,v)
#print self.url
r = requests.post(self.url,headers=headers)
return r.text
def action(self,**kwargs):
"""动作参数m={'index':'获取基础账号密码等信息',‘get_monster_list’:“获取副本怪物列表信息”}
"""
self.action_data = kwargs
self.serverinfo = self.post_url(self.action_data)
#print self.serverinfo
return json.loads(self.serverinfo)
def copies(self):
# 扫荡副本需要传递的参数
# id 是副本名字id ,self.role_info
# diff_id是困难级别分别为1,2,3个级别
# monster_id 是第几个怪物,1-10个,
#times 扫荡的次数
for id in range(1,4):
#遍历三个副本,
print '开始扫荡副本:%s'%id
for diff_id in range(1,4):
print '开始扫关卡:%s' % diff_id
#遍历三个难度,普通,困难,英雄
#for monster_id in range(1,11): #此选项为攻击所有小怪
for monster_id in [3,6,9,10]:#攻击精英怪
#遍历十次小兵
print "开始扫荡小兵"
try:
times = self.action(c="copies",m="get_monster_info",id=id,diff_id=diff_id,monster_id=monster_id,d="newequip")['info']['free_times']
except Exception:
pass
if times != '0':
print self.action(c="copies",m="mop_up",id=id,diff_id=diff_id,monster_id=monster_id,d="newequip",times=int(times))
def qiandao(self):#签到
# 领取连续登陆15天奖励,id:15,c:logined,m:get_reward
print self.action(c='logined',m='index')
print self.action(c='logined',m='get_reward',id=15)
#每日签到,所有动作就是c内容,m动作参数即可,包括领取vip工资,还有每日抽奖
self.action(c='sign',m='sign_index')
# c:vipwage,m:get_vip_wage,领取VIP每日奖励
self.action(c='vipwage',m='get_vip_wage')
def zhengshou(self):#征收
cishu = self.action(c='city',m='index')#获取银币征收次数,m=impose,执行征收
cishu_count = cishu['times']
if cishu_count != '0':#判断征收次数是否为0,不为0则进行全部征收
for count in range(1,int(cishu_count)+1):
print '开始征收第 %d 次'%count
time.sleep(0.5)
print self.action(c='city',m='impose')
else:
print '次数为0次'
def hitegg(self):#砸蛋
hitegg_cd = self.action(c='hitegg',m='index')#获取砸蛋首页面
for i in range(3):
cd = hitegg_cd['list'][i]['cd']
if cd == 0:
print '砸蛋成功'
_id = i+1
self.action(c='hitegg',m='hit_egg',id=_id)
def island(self):#金银洞活动
#获取当前攻击的次数和金银守护者5的状态,是否为攻击过,如果为1则为可以攻击,为0 则表示不可以
count = self.action(c='island',m='get_mission',id=85)['info']['act']
id_open = self.action(c='island',m='index')['list'][4]['openstatus']
if count <= 10 and id_open != 1:
for i in range(81,86):#每日共计5次
print self.action(c='island',m='pk',id=i) #共计金银洞
id_open = self.action(c='island', m='index')['list'][4]['openstatus']
if count <= 10 and id_open == 1:
for i in range(5):
print self.action(c='island', m='pk', id=85)#共计通过之后的最高金银洞5次
else:
print '今天已经攻击了10次不在攻打'
def worldboss(self):#世界boss领取
#银币鼓舞
now_time = time.strftime('%H:%M:%S')
if '20:00:00' < now_time < '20:15:00':
boss_info = self.action(c='worldboss',m='index')
countdown = boss_info['countdown']
powerup = boss_info['powerup']
if powerup != 200:
for i in range(10):
self.action(c='worldboss',m='powerup',gold=0)
while countdown >0:
#获取boss退出世界
countdown = boss_info['countdown']
self.action(c='worldboss',m='battle')
time.sleep(61)
if countdown == 0:
self.action(c='worldboss',m='reward')#reward领取奖励
else:
print '世界boos未开始'
def overseastrade(self):#海外贸易
#购买粮食,花费银币的,id=1为粮食,id2-5为花费元宝的玛瑙等
self.action(c='overseastrade',m='buy_item',id=1)
# 组队 ,检查是否有对, 有则加入,没有则创建 ,开始贸易
#1获取组队列表
list_country = self.action(c='overseastrade',m='get_list_by_country',p=1)['list']
if list_country:#如果列表不为空,说明有组
#自动加组贸易
for k,v in list_country.items():#判断第一个角色有值没有,有责加入第二个,没有则加入第一个#需要time_id
if v['member1'] != '0':#如果不为0 则说明角色有人,加入另一个,
print '加入2'
self.id = v['id']
print self.action(c="overseastrade", m='join_team',id=self.id, place=int(k),site=2,page=1)
else:
print '加入1'
self.id = v['id']
print self.action(c="overseastrade", m='join_team',id=self.id,place=int(k),site=1, page=1)
#print list_country[k]['member1']
else:
#加入贸易队伍,每页有四个框,为place:1-4,每个框有两个位置site:1-2,页数为page:1-10默认为1即可,
print self.action(c="overseastrade",m='join_team',id=0,place=4,site=2,page=1)
def tower(self):#将魂星路
#领取每日奖励
self.action(c='tower',m='reward_info')
self.action(c='tower',m='get_reward')
#获取次数:
self.tower_times = self.action(c='tower',m='get_mission_list',s=7)['times']
print self.action(c='tower',m='mop_up',id=174,times=self.tower_times)
def business(self):#
#获取通商次数
business_times = self.action(c='business',m='index')['times']
print '可用通商次数 %s'%business_times
for count in range(business_times):#执行通商次数
#每次通商是需要输入通商id
print '开始第 %s 次通商'%count
business_id=self.action(c='business', m='index')['trader'][0]['id']
self.action(c='business',m='go_business',id=business_id)
print '通商完成'
def generaltask(self):#每日神将
self.number = self.action(c='generaltask',m='index')['number']#获取次数
print '开始神将扫荡,共计 %s 次'%self.number
#使用长孙无忌gid=210000353508
#怪物id=255
for count in range(int(self.number)):
self.action(type=0,id=255,gid='210000398930',c='generaltask',m='action')
print '神将10次扫荡完毕'
def sanctum(self):
#每日宝石领奖
try:
print '领取每日宝石奖励'
self.action(c='sanctum',m='get_reward',type=1,multiple=0)
except:
print '已经领取宝石奖励'
#扫荡宝石次数
#获取次数
print '开始扫荡宝石'
numbers = self.action(c='sanctum',m='select_map',l=3)['times']
if numbers != 0:
self.action(c='sanctum',m='action',id=150,num=numbers)
else:
print '剩余次数为 %s 次'%numbers
print '宝石扫荡结束'
def lottery(self):#每日抽奖
#c=lottery,m=action
#获取每日抽奖次数
self.numbers = self.action(c='lottery',m='index')['log']['info']['total_num']
print '开始抽奖,剩余次数 %s' % self.numbers
for num in range(self.numbers):
self.action(c='lottery',m='action')
print '抽奖结束'
def herothrone(self):#英雄王座
self.action(c='herothrone',m='index')
for i in range(3):
self.action(c='herothrone',m='start')#开始王座
#攻击:
while True:
flag = self.action(c='herothrone', m='action')['status']
print '攻击王座副本'
if flag == -2:
break
def workshop(self):#玉石收集
#收取
for i in range(1,7):
self.action(c='workshop',m='get_reward',s=i)
def exploit_tree(self):#木材收集
#gather收集,site:1,第一个框
self.action(c='exploit_tree',m='gather',site=1)
self.action(c='exploit_tree',m='action',site=1)
def exploit_stone(self):#石头收集
#exploit_stone,m:{gather收集,action,采集}site:1,第一个框,有三个
for i in range(1,4):
self.action(c='exploit_stone', m='gather', site=i)
self.action(c='exploit_stone', m='action', site=i)
def heaven(self):#通天塔每日奖励和扫荡
#获取每日奖励
self.action(c='heaven',m='get_reward')
self.times = self.action(c='heaven',m='index')['times']
if self.times:
self.action(c='heaven',m='mop_up',id=90,times = self.times)
def arena(self):#
self.action(c='arena', m='index')
self.action(c='arena',m='get_reward')
def zimap(self):#获取图片
#levev:7,11,14是红色sh关卡s:1-9,id:6
#扫荡金色以上5-9
#获取次数nowmaxtimes
for level in range(10,11):#遍历每一个图
#for level in range(14, 17): # 遍历每一个图红色使用
print '开始攻击第 %s 个图'%level
site = len(self.action(c='map',m='get_scene_list',l=level)['list'])
for i in range(site):#遍历关卡图次数
print '攻击第 %s 个关卡' %(i+1)
for id in range(5,10): # 遍历5个小兵
#for id in range(4,9):#遍历5个小兵红色使用
#判断当前次数是否为0次,如果为0 则不扫荡
if level==8 and id !=4:
continue
times = self.action(c='map',m='mission',l=level,s=i+1,id=id)['info']['nowmaxtimes']
#times = self.action(c='map', m='mission', l=level, s=i + 1, id=id)['info']['maxtimes']#红色天赋
print '剩余扫荡次数 %s' %times
if times !=0:
#print 'gongji',level,i+1,id,times
print self.action(c='map',m='action',l=level,s=i+1,id=id,times=times)
def hongmap(self):#获取图片
#levev:7,11,14是红色sh关卡s:1-9,id:6
#扫荡金色以上5-9
#获取次数nowmaxtimes
#for level in range(8,11):#遍历每一个图
for level in range(14, 17): # 遍历每一个图红色使用
print '开始攻击第 %s 个图'%level
site = len(self.action(c='map',m='get_scene_list',l=level)['list'])
for i in range(site):#遍历关卡图次数
print '攻击第 %s 个关卡' %(i+1)
#for id in range(5,10): # 遍历5个小兵
for id in range(4,9):#遍历5个小兵红色使用
#判断当前次数是否为0次,如果为0 则不扫荡
try:
self.action(c='map',m='mission',l=level,s=i+1,id=id)['info']
except KeyError:
continue
times = self.action(c='map',m='mission',l=level,s=i+1,id=id)['info']['nowmaxtimes']
#times = self.action(c='map', m='mission', l=level, s=i + 1, id=id)['info']['maxtimes']#红色天赋
print '剩余扫荡次数 %s' %times
if times !=0:
#print 'gongji',level,i+1,id,times
self.action(c='map',m='action',l=level,s=i+1,id=id,times=times)
def guyu(self):#获取古玉购买
print self.action(c='actguyu',m='reward_index',id=16,num=1)
print self.action(c='actguyu', m='reward_index', id=16, num=50)
def mount_stone(self):
self.action(c='mountstone_throne', m='index')
for i in range(3):
self.action(c='mountstone_throne', m='start') # 开始王座
# 攻击:
while True:
flag = self.action(c='mountstone_throne', m='action')['status']
print '攻击符石副本'
if flag == -2:
break
def dice(self):#国家摇色子
points = self.action(c='dice', m='index')['member']['points']
if int(points) > 400:
self.action(c='dice', m='get_reward',id=2)
for i in range(1,8):
self.action(c='dice',m='shake_dice')
def act_steadily(self):#节节高
info = self.action(c='act_steadily',m='index')
if info['reward']:
status = info['status']
reward_cd = info['reward_cd']
t = info['reward']['time']
if reward_cd == 0 and status == 1:
self.action(c='act_steadily',m='get_online_reward',t=t)
elif reward_cd == 0 and status != 1:
status = 3
return status
else:
print '%s分钟后领取,%s'%(reward_cd/60,reward_cd)
time.sleep(reward_cd+1)
self.action(c='act_steadily', m='get_online_reward', t=t)
else:
print '节节高领完奖励'
status = 3
return status
def act_sword(self):#铸剑
info = self.action(c='act_sword',m='index')
#print json.dumps(info)
need_nums = int(info['need_nums'])
nums = info['nums']
print need_nums,nums
#收获
if need_nums == nums:
self.action(c='act_sword', m='index')
time.sleep(0.5)
self.action(c='act_sword', m='get_cast_reward')
time.sleep(0.5)
self.action(c='act_sword', m='index')
self.action(c='act_sword', m='start')
else:
sleep_time = need_nums - int(nums)
print sleep_time
time.sleep(sleep_time*50)
#print self.action(c='act_sword',m='battle',touid='260000484980')
def awaken_copy(self):#每日
self.action(c='awaken_copy',m='index')
self.action(c='awaken_copy',m='every_reward_index')
self.action(c='awaken_copy',m='get_every_reward',b=1)
def countrymine(self):#每日国家战功
#占领采矿
self.action(c='countrymine',m='caikuang',p=3,id=3,t=5)
def get_countrymine(self):
#收集矿
self.action(c='countrymine',m='index')
info = self.action(c='countrymine',m='get_countrymine_info',p=3,id=3,t=5)
timeinfo = info['info']['time']
#采集矿
print timeinfo
print type(timeinfo)
if timeinfo == 0 :
self.action(c='countrymine', m='get_reward',s=3)
else:
time.sleep(timeinfo+10)
self.action(c='countrymine', m='get_reward',s=3)
def mouth_card(self):
#月卡奖励
self.action(c='month_card',m='get_reward')
def beauty(self):#铜雀台互动
status = 1
while status == 1:
status = self.action(c='beauty',m='active_action',beauty_id=2,type=1)['status']
def country(self):#每日国家奖励
self.action(c='country',m='get_salary')
def countrysacrifice(self):#每日贡献
print self.action(c='countrysacrifice', m='index', id=1)
print self.action(c='countrysacrifice',m='action',id=1)
def get_act(self):
act_info = self.action(c='member',m='index')
return act_info
def main():
action = SaoDangFb()
action.arena()# 获取每日演武奖
action.qiandao()#每日签到
action.hitegg()#砸蛋
action.heaven()#通天塔
action.workshop()#玉石采集
action.exploit_tree()#木材采集
action.exploit_stone()#石头采集
action.herothrone()#英雄王座
action.sanctum()#每日宝石领奖
action.generaltask()#
action.business()#每日通商
action.tower()#将魂星路
action.island()#金银洞
action.lottery()#每日抽奖
action.worldboss()#世界boos
action.copies() # 扫荡副本
action.mount_stone()#每日大马副本
action.awaken_copy()#觉醒奖励
action.dice()#国家摇色子
action.mouth_card()#月卡奖励
action.beauty()#铜雀台互动
action.country()#国家贡献
action.overseastrade() # 海外贸易
action.countrysacrifice()
for i in range(3):
action.countrymine()
action.get_countrymine()
if action.get_act()['act_steadily'] == 1:
status = 1
while status == 1:
status = a.act_steadily()
if __name__ == '__main__':
action = SaoDangFb()
now_time = time.strftime('%H:%M:%S')
while True:
now_time = time.strftime('%H:%M:%S')
if now_time >= '00:01:00':
print now_time
exit(2)
else:
threading.Thread(target=action.guyu).start()
| [
"a413728161@vip.qq.com"
] | a413728161@vip.qq.com |
3289ce4ce8028ae5827693a4a4d62674fdb8867e | a3af97d1110a7e60c53932e5cfbc0831ecb3ec78 | /projetos/urls.py | 35c61b22247a784fb59597c2470bfb48719eb06c | [] | no_license | guedesemerson/navedex | 7c33b291a72caeab29891138d3268702f7dc2dd7 | 0dc2fbaec4f7a7f7b9203a2bb11e5e0147355800 | refs/heads/master | 2022-12-03T12:49:55.490910 | 2020-08-16T20:43:13 | 2020-08-16T20:43:13 | 288,010,754 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | from django.urls import path
from .api.viewsets import (Index,
Store,
Delete,
Update,
Retrieve)
urlpatterns = [
path('index', Index.as_view()),
path('store', Store.as_view()),
path('show/<int:id>', Retrieve.as_view()),
path('delete/<int:id>', Delete.as_view()),
path('update/<int:id>', Update.as_view()),
] | [
"guedes.emerson@hotmail.com"
] | guedes.emerson@hotmail.com |
c0e93c7a79a1902452e4c9ddcc8ce49f3d5f8f9a | 2aace9bb170363e181eb7520e93def25f38dbe5c | /build/idea-sandbox/system/python_stubs/cache/b06f7eade2cb0285fb36ade60484d980cb6f2d68def364bdccaea8075ac242f5/xml/parsers/expat/model.py | 540a8828ad5f498d0fa72cf68e8e707fda7dd85d | [] | no_license | qkpqkp/PlagCheck | 13cb66fd2b2caa2451690bb72a2634bdaa07f1e6 | d229904674a5a6e46738179c7494488ca930045e | refs/heads/master | 2023-05-28T15:06:08.723143 | 2021-06-09T05:36:34 | 2021-06-09T05:36:34 | 375,235,940 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 565 | py | # encoding: utf-8
# module xml.parsers.expat.model calls itself pyexpat.model
# from C:\Users\Doly\Anaconda3\lib\site-packages\sklearn\linear_model\cd_fast.cp37-win_amd64.pyd
# by generator 1.147
""" Constants used to interpret content model information. """
# no imports
# Variables with simple values
XML_CQUANT_NONE = 0
XML_CQUANT_OPT = 1
XML_CQUANT_PLUS = 3
XML_CQUANT_REP = 2
XML_CTYPE_ANY = 2
XML_CTYPE_CHOICE = 5
XML_CTYPE_EMPTY = 1
XML_CTYPE_MIXED = 3
XML_CTYPE_NAME = 4
XML_CTYPE_SEQ = 6
__loader__ = None
__spec__ = None
# no functions
# no classes
| [
"qinkunpeng2015@163.com"
] | qinkunpeng2015@163.com |
e8ea68060044e9243443442ed10abde618032aab | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_2493.py | 547646f257ecd97f0912b31b5827cd7e641734ac | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | # sqlobject: No connection has been defined for this thread or process
from sqlobject import sqlhub, connectionForURI
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
49a25dceb73a4332be16ac355cb8064b302b6927 | cb4e07b2a5dd30804ce428ec84d9e9f77709fcd5 | /swea/D4/5678. [Professional] 팰린드롬.py | 9a27a11b5210bc71a6372ee402db6ea3af5d54d6 | [] | no_license | jbsam2/algo_problem | 141c17003e88a69afdeea93a723e7f27c4626fdc | 18f2cab5a9af2dec57b7fd6f8218badd7de822e4 | refs/heads/master | 2023-05-18T10:03:00.408300 | 2021-06-02T10:36:50 | 2021-06-02T10:36:50 | 282,104,637 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | for t in range(int(input())):
s=input();r=1
for i in range(1,len(s)+1):
for j in range(len(s)-i+1):
p=s[j:j+i]
if p==p[::-1]:r=max(r,i)
print(f'#{t+1}',r) | [
"kbsam2@gmail.com"
] | kbsam2@gmail.com |
3bcca1c4065ab91c564c8c4f32a5519e832278be | fc72eba186256fa8b7f2f6b27b802af6bcd5c5c3 | /patterns/5.py | 422e82dad5bb7c5c5633c444796aca4647bfcda2 | [
"MIT"
] | permissive | OmnesRes/GRIMMER | 8d33878cf00d9e241cca4c52f603e4250478534f | 173c99ebdb6a9edb1242d24a791d0c5d778ff643 | refs/heads/master | 2021-01-21T17:10:23.159315 | 2017-04-15T16:38:09 | 2017-04-15T16:38:09 | 66,272,473 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | pattern_zero=[0.0, 0.16, 0.24, 0.4, 0.56, 0.64, 0.8, 0.96]
pattern_odd=[0.04, 0.2, 0.36, 0.44, 0.6, 0.76, 0.84]
pattern_even=[0.0, 0.16, 0.24, 0.4, 0.56, 0.64, 0.8, 0.96]
averages_even={0.0: [0.0], 0.8: [0.0], 0.4: [0.0], 0.96: [0.8, 0.2], 0.24: [0.6, 0.4], 0.16: [0.2, 0.8], 0.56: [0.8, 0.2], 0.64: [0.6, 0.4]}
averages_odd={0.36: [0.8, 0.2], 0.2: [0.0], 0.44: [0.6, 0.4], 0.6: [0.0], 0.76: [0.2, 0.8], 0.84: [0.6, 0.4], 0.04: [0.6, 0.4]} | [
"jordananaya@gmail.com"
] | jordananaya@gmail.com |
0a6e8403448839c6d5c96c2b39d63e8a94000c98 | 54857571461a579bed30cee27871aaa5fe396bcc | /nltk-0.9.7/src/nltk_contrib/lambek/term.py | 0f187a08d18a3a5bce2aa10e6396b3234b76face | [] | no_license | ahmedBazaz/affective-text-classification | 78375182e800b39e0e309e8b469e273c0d9590f0 | 719e9b26e60863c620662564fb9cfeafc004777f | refs/heads/master | 2021-01-10T14:50:01.100274 | 2009-01-09T03:59:01 | 2009-01-09T03:59:01 | 48,296,612 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,561 | py | #!/usr/bin/python
#
# term.py
#
# Edward Loper
# Created [12/10/00 01:58 PM]
# $Id$
#
"""Lambda calculus stuff"""
import types, re
from copy import deepcopy
class Term:
#FREEVAR_NAME = ['e', 'd', 'c', 'b', 'a']
FREEVAR_NAME = ['$\\epsilon$', '$\\delta$', '$\\gamma$', \
'$\\beta$', '$\\alpha$']
BOUNDVAR_NAME = ['z', 'y', 'x']
def __init__(self):
raise TypeError("Term is an abstract class")
class Var(Term):
_max_id = 0
def __init__(self):
Var._max_id += 1
self.id = Var._max_id
def __repr__(self):
return '?' + `self.id`
def pp(self, pp_varmap=None):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
return pp_varmap[self]
def to_latex(self, pp_varmap=None):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
return '\\textit{'+pp_varmap[self]+'}'
def __hash__(self):
return self.id
def __cmp__(self, other):
if isinstance(other, Var) and other.id == self.id: return 0
else: return -1
class Const(Term):
def __init__(self, name):
if type(name) != types.StringType:
raise TypeError("Expected a string name")
self.name = name
def __repr__(self):
return self.name
def pp(self, pp_varmap=None):
return self.name
def to_latex(self, pp_varmap=None):
return '\\textbf{'+self.name+'}'
def __cmp__(self, other):
if isinstance(other, Const) and other.name == self.name:
return 0
else: return -1
class Appl(Term):
def __init__(self, func, arg):
self.func = func
self.arg = arg
if not isinstance(self.func, Term) or \
not isinstance(self.arg, Term):
raise TypeError('Expected Term argument', func, arg)
def __repr__(self):
if isinstance(self.func, Appl) or \
isinstance(self.func, Abstr):
return '('+`self.func` + ')(' + `self.arg` + ')'
else:
return `self.func` + '(' + `self.arg` + ')'
def pp(self, pp_varmap=None):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
if isinstance(self.func, Appl) or \
isinstance(self.func, Abstr):
return '(' + self.func.pp(pp_varmap) + ')(' + \
self.arg.pp(pp_varmap) + ')'
else:
return self.func.pp(pp_varmap) + '(' + \
self.arg.pp(pp_varmap) + ')'
def to_latex(self, pp_varmap=None):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
if isinstance(self.func, Appl) or \
isinstance(self.func, Abstr):
return '\\left(' + self.func.to_latex(pp_varmap) + \
'\\right)\\left(' + \
self.arg.to_latex(pp_varmap) + '\\right)'
else:
return self.func.to_latex(pp_varmap) + '(' + \
self.arg.to_latex(pp_varmap) + ')'
def __cmp__(self, other):
if isinstance(other, Appl) and other.func == self.func and \
other.arg == self.arg: return 0
else: return -1
class Abstr(Term):
def __init__(self, var, body):
self.var = var
self.body = body
if not isinstance(self.var, Var) or \
not isinstance(self.body, Term):
raise TypeError('Expected Var and Term arguments')
def __repr__(self):
if isinstance(self.body, Abstr) or \
isinstance(self.body, Appl):
return '(\\' + `self.var` + '.' + `self.body`+')'
else:
return '\\' + `self.var` + '.' + `self.body`
def pp(self, pp_varmap=None):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
if isinstance(self.body, Abstr) or \
isinstance(self.body, Appl):
return '(' + '\\' + self.var.pp(pp_varmap) + '.' + \
self.body.pp(pp_varmap) + ')'
else:
return '\\' + self.var.pp(pp_varmap) + '.' + \
self.body.pp(pp_varmap)
def to_latex(self, pp_varmap):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
if isinstance(self.body, Abstr) or \
isinstance(self.body, Appl):
return '\\left(' + '\\lambda ' + self.var.to_latex(pp_varmap) + \
'.' + self.body.to_latex(pp_varmap) + '\\right)'
else:
return '\\lambda' + self.var.to_latex(pp_varmap) + \
'.' + self.body.to_latex(pp_varmap)
def __cmp__(self, other):
if isinstance(other, Abstr) and \
self.body == replace(other.var, self.var, other.body):
return 0
else: return -1
class Tuple(Term):
def __init__(self, left, right):
self.left = left
self.right = right
if not isinstance(self.left, Term) or \
not isinstance(self.right, Term):
raise TypeError('Expected Term arguments')
def __repr__(self):
return '<'+`self.left`+', '+`self.right`+'>'
def pp(self, pp_varmap=None):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
return '<'+self.left.pp(pp_varmap)+', '+\
self.right.pp(pp_varmap)+'>'
def to_latex(self, pp_varmap=None):
if pp_varmap == None: pp_varmap = make_pp_varmap(self)
return '\\left\\langle'+self.left.to_latex(pp_varmap)+', '+\
self.right.to_latex(pp_varmap)+'\\right\\rangle'
def __cmp__(self, other):
if isinstance(other, Tuple) and other.left == self.left and \
other.right == self.right: return 0
else: return -1
def make_pp_varmap(term):
return extend_pp_varmap({}, term)
def extend_pp_varmap(pp_varmap, term):
# Get free and bound vars
free = freevars(term)
bound = boundvars(term)
# Get the remaining names.
freenames = [n for n in Term.FREEVAR_NAME \
if n not in pp_varmap.values()]
boundnames = Term.BOUNDVAR_NAME[:]
for fv in free:
if not pp_varmap.has_key(fv):
if freenames == []:
pp_varmap[fv] = `fv`
else:
pp_varmap[fv] = freenames.pop()
for bv in bound:
if not pp_varmap.has_key(bv):
if boundnames == []:
pp_varmap[bv] = `bv`
else:
pp_varmap[bv] = boundnames.pop()
return pp_varmap
class VarMap:
def __init__(self):
self._map = {}
def add(self, var, term):
if self._map.has_key(var):
if term != None and term != self._map[var]:
# Unclear what I should do here -- for now, just pray
# for the best. :)
None
else:
self._map[var] = term
def __repr__(self):
return `self._map`
def _get(self, var, orig, getNone=1):
val = self._map[var]
if not getNone and val == None: return var
if not isinstance(val, Var): return val
if val == orig:
#print 'WARNING: CIRCULAR LOOP'
# Break the loop at an arbitrary point.
del(self._map[val])
return val
elif self._map.has_key(val):
return(self._get(val, orig, getNone))
else:
return val
def __getitem__(self, var):
if self._map.has_key(var):
return self._get(var, var, 1)
else:
return var
def simplify(self, var):
if self._map.has_key(var):
return self._get(var, var, 0)
else:
return var
def copy(self):
result = VarMap()
result._map = self._map.copy()
return result
def __add__(self, other):
result = self.copy()
for var in other._map.keys():
result.add(var, other[var])
return result
def copy_from(self, other):
self._map = other._map.copy()
def force(self, var, term):
self._map[var] = term
# Use a varmap to simplify an term.
def simplify(term, varmap):
if isinstance(term, Var):
e = varmap.simplify(term)
if e == term or e == None:
return term
else:
return simplify(e, varmap)
if isinstance(term, Appl):
return Appl(simplify(term.func, varmap), \
simplify(term.arg, varmap))
if isinstance(term, Tuple):
return Tuple(simplify(term.left, varmap), \
simplify(term.right, varmap))
if isinstance(term, Abstr):
return Abstr(term.var, simplify(term.body, varmap))
if isinstance(term, Const):
return term
_VERBOSE = 0
def unify(term1, term2, varmap=None, depth=0):
if _VERBOSE: print ' '*depth+'>> unify', term1, term2, varmap
term1 = reduce(term1)
term2 = reduce(term2)
if varmap == None: varmap = VarMap()
old_varmap = varmap.copy()
result = unify_oneway(term1, term2, varmap, depth+1)
if result:
if _VERBOSE:
print ' '*depth+'<<unify', term1, term2, varmap, '=>', result
return result
varmap.copy_from(old_varmap)
result = unify_oneway(term2, term1, varmap, depth+1)
if result:
if _VERBOSE:
print ' '*depth+'<<unify', term1, term2, varmap, '=>', result
return result
#raise(ValueError("can't unify", term1, term2, varmap))
if _VERBOSE:
print ' '*depth+'unify', term1, term2, varmap, '=>', None
return None
#### THIS FUNCTION IS CURRENTLY PARTIALLY BROKEN
# Possible pairings:
# var <-> abstr
# var <-> appl
# var <-> var
# var <-> const
# abstr <-> abstr
# abstr <-> apl
# apl <-> apl
# const <-> const
# tuple <-> tuple
#
def unify_oneway(term1, term2, varmap, depth):
term1 = reduce(term1)
term2 = reduce(term2)
# Identical
if term1 == term2: return term1
# If term1 is a var in varmap, get its value...
if isinstance(term1, Var):
if varmap[term1] != None:
term1 = varmap[term1]
# Variable
if isinstance(term1, Var):
if varmap[term1] == None:
# It's a bound var
if term1 == term2: return term1
else: return None
elif term1 in freevars(term2):
if term1 == term2: return term1
else: return None
else:
# Eliminate it.
varmap.add(term1, term2)
return term2
# Tuple
if isinstance(term1, Tuple):
if isinstance(term2, Tuple):
left = unify(term1.left, term2.left, varmap, depth)
right = unify(term1.right, term2.right, varmap, depth)
if left != None and right != None:
return Tuple(left, right)
# Abstraction
if isinstance(term1, Abstr):
if isinstance(term2, Abstr):
x = Var()
body1 = replace(term1.var, x, term1.body)
body2 = replace(term2.var, x, term2.body)
varmap.force(term1.var, x)
varmap.force(term2.var, x)
varmap.add(x, None)
abstr = Abstr(x, unify(body1, body2, varmap, depth))
return abstr
if isinstance(term2, Appl):
## ***** There is a way to do this, but I haven't figured
## ***** it out yet.
return None
if isinstance(term1, Appl):
if isinstance(term2, Appl):
# Try unifying func and arg..
old_varmap = varmap.copy()
func = unify(term1.func, term2.func, varmap, depth)
arg = unify(term1.arg, term2.arg, varmap, depth)
if func != None and arg != None:
return Appl(func, arg)
varmap.copy_from(old_varmap)
# If the functor of term1 is a variable, try instantiating
# it as a lambda term of some sort.
if isinstance(term1.func, Var) and \
varmap[term1.func] != None and \
isinstance(term1.arg, Var):
x = Var()
body = replace(term1.arg, x, term2)
# I need some sort of check here!!
abstr = Abstr(x, body)
varmap.add(x, None)
varmap.add(term1.func, abstr)
return term2
if isinstance(term1, Const):
if term1 == term2: return term1
else: return None
return None
def replace(oldval, newval, term):
"Replace all occurances of oldval with newval in term"
if term == oldval:
return newval
elif isinstance(term, Appl):
return Appl(replace(oldval, newval, term.func),\
replace(oldval, newval, term.arg))
elif isinstance(term, Abstr):
if (oldval == term.var):
return term
else:
return Abstr(term.val, replace(oldval, newval, term.body))
elif isinstance(term, Tuple):
return Tuple(replace(oldval, newval, term.left),
replace(oldval, newval, term.right))
else:
return term
def union(lst1, lst2):
lst = lst1[:]
for elt in lst2:
if elt not in lst:
lst.append(elt)
return lst
def freevars(term):
if isinstance(term, Var):
return [term]
elif isinstance(term, Appl):
return union(freevars(term.func), freevars(term.arg))
elif isinstance(term, Abstr):
return [var for var in freevars(term.body) if var != term.var]
elif isinstance(term, Tuple):
return union(freevars(term.left), freevars(term.right))
else:
return []
def vars(term):
if isinstance(term, Var):
return [term]
elif isinstance(term, Appl):
return union(vars(term.func), vars(term.arg))
elif isinstance(term, Abstr):
return union(vars(term.body), [term.var])
elif isinstance(term, Tuple):
return union(vars(term.left), vars(term.right))
else:
return []
def boundvars(term):
free = freevars(term)
return [var for var in vars(term) if var not in free]
def reduce(term):
if isinstance(term, Var) or isinstance(term, Const):
return term
if isinstance(term, Tuple):
return Tuple(reduce(term.left), reduce(term.right))
if isinstance(term, Appl):
# Reduce the function and argument
func = reduce(term.func)
arg = reduce(term.arg)
if isinstance(func, Abstr):
return reduce(replace(func.var, arg, func.body))
else:
return Appl(func, arg)
if isinstance(term, Abstr):
# Reduce the body
var = term.var
body = reduce(term.body)
if isinstance(body, Appl) and \
body.arg == var and \
var not in freevars(body.func):
return body.func
else:
return Abstr(var, body)
# Strip outermost parens from a string.. inefficient, but simple :)
def strip_parens(str):
if len(str) < 2 or str[0] != '(' or str[-1] != ')': return str
depth = 0
for c in str[:-1]:
if c == '(': depth += 1
if c == ')': depth -= 1
if depth == 0: return str
return strip_parens(str[1:-1])
def extract_tuple(str):
if str[0] != '<' or str[-1] != '>': return None
comma = None
depth = 1
for i in range(1, len(str)-1):
if str[i] in '(<': depth += 1
if str[i] in ')>': depth -= 1
if depth == 1 and str[i] == ',':
if comma == None: comma = i
else: raise ValueError('bad tuple')
if depth == 0: return None
if comma == None: raise ValueError('bad tuple', str)
return (str[1:comma], str[comma+1:-1])
# Maps str -> lambda term.
# Vars should start with '?'
def parse_term(str, varmap=None):
if varmap == None: varmap = {}
str = strip_parens(str.strip())
# Abstractions with numbered vars
abstr = re.match(r'\\\?([^\.]+)\.(.*)', str)
if abstr:
(varname, body) = abstr.groups()
var = Var()
varmap[varname]=var
return Abstr(var, parse_term(body, varmap))
# Tuple
tuple = extract_tuple(str)
if tuple:
return Tuple(parse_term(tuple[0], varmap), \
parse_term(tuple[1], varmap))
# Application
if '(' in str:
depth = 0
for i in range(len(str)):
if str[i] in '(<':
if depth == 0 and i > 0: break
else: depth += 1
if str[i] in ')>':
depth -= 1
func = parse_term(str[:i], varmap)
arg = parse_term(str[i:], varmap)
return Appl(func, arg)
# Variable
var = re.match(r'\?(.*)', str)
if var:
varname = var.groups()[0]
if varmap.has_key(varname):
return varmap[varname]
else:
var = Var()
varmap[varname] = var
return var
# Constant
return Const(str)
def test():
x = Var()
y = Var()
z = Var()
c = Const('c')
f1 = Appl(Abstr(x, Appl(x, c)), z)
f2 = Appl(Abstr(x, Appl(c, x)), z)
f3 = Abstr(x, Appl(c, x))
f4 = Abstr(y, Appl(c, y))
print f1, '=>', reduce(f1)
print f2, '=>', reduce(f2)
print f3, '=>', reduce(f3)
print f1.pp()
print f2.pp()
print f3.pp()
print
print unify(x, y)
print unify(x, c)
print unify(x, f1)
print unify(f3, f4)
print unify(Abstr(x,Appl(x,x)), Abstr(y,Appl(y,y)))
print parse_term('<(\?var.<const,const2>(?var))(?other_var),?x>').pp()
reduce(parse_term('<a,b>'))
if __name__ == '__main__':
test()
| [
"tytung@6129d76e-ddfe-11dd-a37d-c9d1c40e0883"
] | tytung@6129d76e-ddfe-11dd-a37d-c9d1c40e0883 |
0bc503d7f437039e8371dc85dd16dfbd0087202e | 9e7b9e91b8425061a5ad36e0dd630a799ec79f6f | /pytorch_practice8.py | 38003a6265cce7e570f9a5621520f9e2f1ce6ef8 | [] | no_license | OlgaBelitskaya/colab_notebooks | c27fad60f7e4ca35287e2561487b5d9d82efde43 | d568149c8bcfb0025f7b09120ca44f639ac40efe | refs/heads/master | 2023-07-07T23:02:49.289280 | 2021-08-14T08:16:38 | 2021-08-14T08:16:38 | 158,067,383 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,270 | py | # -*- coding: utf-8 -*-
"""pytorch_practice8.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1mVSQXzQC_4tC0Zp9zY2nH1NvBGjubwRZ
Reading classics [Deep Learning Models](https://nbviewer.jupyter.org/github/rasbt/deeplearning-models/blob/master/pytorch_ipynb/cnn/cnn-alexnet-cifar10.ipynb)
## Code Modules, Classes & Functions
"""
import numpy as np,pandas as pd,pylab as pl
import h5py,torch,urllib,zipfile
from tensorflow import image as timage
from torchvision.datasets import CIFAR10 as tcifar10
from torchvision import transforms,utils
from torch.utils.data import DataLoader as tdl
from torch.utils.data import Dataset as tds
from torch.utils.data.dataset import Subset
import torch.nn as tnn
from IPython.core.magic import register_line_magic
dev=torch.device("cuda:0" if torch.cuda.is_available()
else "cpu")
class TData(tds):
def __init__(self,X,y):
self.X=torch.tensor(X,dtype=torch.float32)
self.y=torch.tensor(y,dtype=torch.int32)
def __getitem__(self,index):
train_img,train_lbl=self.X[index],self.y[index]
return train_img,train_lbl
def __len__(self):
return self.y.shape[0]
class AlexNN(tnn.Module):
def __init__(self,num_classes):
super(AlexNN,self).__init__()
self.features=tnn.Sequential(
tnn.Conv2d(3,64,kernel_size=11,
stride=4,padding=2),
tnn.ReLU(inplace=True),
tnn.MaxPool2d(kernel_size=3,stride=2),
tnn.Conv2d(64,192,kernel_size=5,padding=2),
tnn.ReLU(inplace=True),
tnn.MaxPool2d(kernel_size=3,stride=2),
tnn.Conv2d(192,384,kernel_size=3,padding=1),
tnn.ReLU(inplace=True),
tnn.Conv2d(384,256,kernel_size=3,padding=1),
tnn.ReLU(inplace=True),
tnn.Conv2d(256,256,kernel_size=3,padding=1),
tnn.ReLU(inplace=True),
tnn.MaxPool2d(kernel_size=3,stride=2))
self.avgpool=tnn.AdaptiveAvgPool2d((6,6))
self.classifier=tnn.Sequential(
tnn.Dropout(.5),
tnn.Linear(256*6*6,4096),
tnn.ReLU(inplace=True),
tnn.Dropout(.5),
tnn.Linear(4096,4096),
tnn.ReLU(inplace=True),
tnn.Linear(4096,num_classes))
def forward(self,x):
x=self.features(x)
x=self.avgpool(x)
x=x.view(x.size(0),256*6*6)
logits=self.classifier(x)
probs=tnn.functional.softmax(logits,dim=1)
return logits,probs
def model_acc(model,data_loader):
correct_preds,num_examples=0,0
for features,targets in data_loader:
features=features.to(dev)
targets=targets.to(dev)
logits,probs=model(features)
_,pred_labels=torch.max(probs,1)
num_examples+=targets.size(0)
correct_preds+=(pred_labels==targets).sum()
return correct_preds.float()/num_examples*100
def display_examples(data_loader):
for images,labels in data_loader:
print('Image dimensions: %s'%str(images.shape))
print('Label dimensions: %s'%str(labels.shape))
n=np.random.randint(1,50)
fig=pl.figure(figsize=(11,4))
for i in range(n,n+5):
ax=fig.add_subplot(1,5,i-n+1,\
xticks=[],yticks=[],title=labels[i].item())
ax.imshow((images[i]).reshape(64,64,3))
break
def show_image(img):
npimg=img.numpy()/2.+.5; tr=(1,2,0)
pl.figure(figsize=(10,2))
pl.imshow(np.transpose(npimg,tr))
pl.xticks([]); pl.show()
def show_examples(train_loader,classes,num_examples):
dataiter=iter(train_loader)
images,labels=dataiter.next()
show_image(utils.make_grid(images[0:num_examples]))
print('^'.join('%9s'%classes[labels[j]]
for j in range(num_examples)),end='^')
# Commented out IPython magic to ensure Python compatibility.
@register_line_magic
def train_run(epochs):
epochs=int(epochs)
for epoch in range(epochs):
model.train()
for batch_ids,(features,targets) in enumerate(train_loader):
features=features.to(dev); targets=targets.to(dev)
logits,probs=model(features)
cost=tnn.functional.cross_entropy(logits,targets)
optimizer.zero_grad(); cost.backward()
optimizer.step()
if not batch_ids%200:
print ('Epoch: %03d/%03d | Batch %03d/%03d | Cost: %.4f'
# %(epoch+1,epochs,batch_ids,
len(train)//batch_size,cost))
model.eval()
with torch.set_grad_enabled(False):
print('Epoch: %03d/%03d train acc: %.2f%% valid acc: %.2f%%'%\
(epoch+1,epochs,
model_acc(model,train_loader),
model_acc(model,valid_loader)))
@register_line_magic
def train_run2(epochs):
epochs=int(epochs)
for epoch in range(epochs):
model.train()
for batch_ids,(features,targets) in enumerate(train_loader2):
features=features.to(dev); targets=targets.to(dev)
logits,probs=model(features)
cost=tnn.functional.cross_entropy(logits,targets.long())
optimizer.zero_grad(); cost.backward()
optimizer.step()
if not batch_ids%50:
print ('Epoch: %03d/%03d | Batch %03d/%03d | Cost: %.4f'
# %(epoch+1,epochs,batch_ids,
len(train2)//batch_size2,cost))
model.eval()
with torch.set_grad_enabled(False):
print('Epoch: %03d/%03d train acc: %.2f%% valid acc: %.2f%%'%\
(epoch+1,epochs,
model_acc(model,train_loader2),
model_acc(model,valid_loader2)))
@register_line_magic
def print_acc(n):
if int(n)==1:
data_loader=\
[train_loader,valid_loader,test_loader]
if int(n)==2:
data_loader=\
[train_loader2,valid_loader2,test_loader2]
print('Train accuracy: %.4f%%'%\
(model_acc(model,data_loader[0])))
print('Valid accuracy: %.4f%%'%\
(model_acc(model,data_loader[1])))
print('Test accuracy: %.4f%%'%\
(model_acc(model,data_loader[2])))
"""## Data"""
random_seed=1; batch_size=128
train_ids=torch.arange(0,44000)
valid_ids=torch.arange(44000,50000)
tr0=(.5,.5,.5)
train_transform=transforms\
.Compose([transforms.Resize((70,70)),
transforms.RandomCrop((64,64)),
transforms.ToTensor(),
transforms.Normalize(tr0,tr0)])
test_transform=transforms\
.Compose([transforms.Resize((70,70)),
transforms.CenterCrop((64,64)),
transforms.ToTensor(),
transforms.Normalize(tr0,tr0)])
train_valid=tcifar10(root='data',train=True,download=True,
transform=train_transform)
train=Subset(train_valid,train_ids)
valid=Subset(train_valid,valid_ids)
test=tcifar10(root='data',train=False,
transform=test_transform)
train_loader=tdl(dataset=train,shuffle=True,
batch_size=batch_size)
valid_loader=tdl(dataset=valid,shuffle=True,
batch_size=batch_size)
test_loader=tdl(dataset=test,shuffle=False,
batch_size=batch_size)
classes=('plane','car','bird','cat','deer',
'dog','frog','horse','ship','truck')
show_examples(valid_loader,classes,7)
fpath='https://olgabelitskaya.github.io/'
zf='LetterColorImages_123.h5.zip'
input_file=urllib.request.urlopen(fpath+zf)
output_file=open(zf,'wb');
output_file.write(input_file.read())
output_file.close(); input_file.close()
zipf=zipfile.ZipFile(zf,'r')
zipf.extractall(''); zipf.close()
f=h5py.File(zf[:-4],'r')
keys=list(f.keys()); print(keys)
x=np.array(f[keys[1]],dtype='float32')/255
x=np.array(timage.resize(x,[64,64]))\
.reshape(-1,3,64,64)
y=np.array(f[keys[2]],dtype='int32')-1
N=len(y); n=int(.1*N)
shuffle_ids=np.arange(N)
np.random.RandomState(23).shuffle(shuffle_ids)
x,y=x[shuffle_ids],y[shuffle_ids]
x_test,x_valid,x_train=x[:n],x[n:2*n],x[2*n:]
y_test,y_valid,y_train=y[:n],y[n:2*n],y[2*n:]
x_valid.shape,y_valid.shape
random_seed=1; batch_size2=128
train2=TData(x_train,y_train)
valid2=TData(x_valid,y_valid)
test2=TData(x_test,y_test)
train_loader2=tdl(dataset=train2,batch_size=batch_size2,shuffle=True)
valid_loader2=tdl(dataset=valid2,batch_size=batch_size2,shuffle=True)
test_loader2=tdl(dataset=test2,batch_size=batch_size2,shuffle=False)
display_examples(valid_loader2)
"""## AlexNet Training"""
torch.manual_seed(random_seed)
num_classes=10; learning_rate=.0001
model=AlexNN(num_classes)
model.to(dev)
optimizer=torch.optim.Adam(model.parameters(),
lr=learning_rate)
# Commented out IPython magic to ensure Python compatibility.
# %train_run 10
# Commented out IPython magic to ensure Python compatibility.
# %print_acc 1
torch.manual_seed(random_seed)
num_classes=33; learning_rate=.0001
model=AlexNN(num_classes)
model.to(dev)
optimizer=torch.optim.Adam(model.parameters(),
lr=learning_rate)
# Commented out IPython magic to ensure Python compatibility.
# %train_run2 30
# Commented out IPython magic to ensure Python compatibility.
# %print_acc 2 | [
"safuolga@gmail.com"
] | safuolga@gmail.com |
3aca9655153ffcd55b21dee39c568f740c904512 | 2827d7a837eb29c3cb07793ab6d3d5a753e18669 | /alipay/aop/api/request/KoubeiTradeItemorderBuyRequest.py | 037cc2e3a3272dc13f6b546ae46b7488e413434a | [
"Apache-2.0"
] | permissive | shaobenbin/alipay-sdk-python | 22e809b8f5096bec57d2bb25414f64bdc87fa8b3 | 5232ad74dff2e8a6e0e7646ab3318feefa07a37d | refs/heads/master | 2020-03-21T04:51:39.935692 | 2018-06-21T07:03:31 | 2018-06-21T07:03:31 | 138,131,022 | 0 | 0 | null | 2018-06-21T06:50:24 | 2018-06-21T06:50:24 | null | UTF-8 | Python | false | false | 3,936 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.KoubeiTradeItemorderBuyModel import KoubeiTradeItemorderBuyModel
class KoubeiTradeItemorderBuyRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, KoubeiTradeItemorderBuyModel):
self._biz_content = value
else:
self._biz_content = KoubeiTradeItemorderBuyModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._notify_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'koubei.trade.itemorder.buy'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
| [
"liuqun.lq@alibaba-inc.com"
] | liuqun.lq@alibaba-inc.com |
de4ac45769abdafc96febca8486bb551b87659b0 | 412f4a6a5853486515136e1dc2fb2a98b1de8b78 | /flaskthreads.py | c05ead2c625c7877ef02c7a494e5a4c814a631e4 | [] | no_license | zwlyn/test | a43fca97af377e90fb0687c4dbf9f8e836365d5d | 91084cb42b959635c0071184222c4e0fdf077563 | refs/heads/master | 2020-08-21T13:45:47.323104 | 2019-10-22T10:03:35 | 2019-10-22T10:03:35 | 216,173,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | #-*- encoding:utf_8 -*-
from flask import Flask
app = Flask(__name__)
@app.route('/', methods = ['GET'])
def demo():
return "gunicorn and flask demo"
if __name__ == '__main__':
app.run(debug=True,threaded=True)
| [
"1666013677@qq.com"
] | 1666013677@qq.com |
3ebf236d3782198f90f3912a4a7291f05d6af50a | 3cbf4a9d14cd487520f2bd05db10542705a37baf | /h2o-py/tests/testdir_algos/gbm/pyunit_mnist_manyCols_gbm_large.py | a1a2d84e7b47c80122b6ee0faa516ca2e442bdff | [
"Apache-2.0"
] | permissive | KendraFabric/h2o-3 | 733ff021553ff2c2d8f0c3336450d886d029cf15 | c75bc5d2dc644cc8c09df755185a4cc6e34e0d1a | refs/heads/master | 2023-03-15T12:32:02.852026 | 2016-08-26T14:01:07 | 2016-08-26T14:27:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 714 | py | import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
def mnist_many_cols_gbm_large():
train = h2o.import_file(path=pyunit_utils.locate("bigdata/laptop/mnist/train.csv.gz"))
train.tail()
from h2o.estimators.gbm import H2OGradientBoostingEstimator
gbm_mnist = H2OGradientBoostingEstimator(ntrees=1,
max_depth=1,
min_rows=10,
learn_rate=0.01)
gbm_mnist.train(x=range(784), y=784, training_frame=train)
gbm_mnist.show()
if __name__ == "__main__":
pyunit_utils.standalone_test(mnist_many_cols_gbm_large)
else:
mnist_many_cols_gbm_large()
| [
"spnrpa@gmail.com"
] | spnrpa@gmail.com |
bf8fe27306543b7b8e6f4c310bdaf5a2344ad562 | bf99b1b14e9ca1ad40645a7423f23ef32f4a62e6 | /AtCoder/abc/066d_2.py | 7ae18e84240a2c3ac692397a688c5f7d69c412a1 | [] | no_license | y-oksaku/Competitive-Programming | 3f9c1953956d1d1dfbf46d5a87b56550ff3ab3db | a3ff52f538329bed034d3008e051f30442aaadae | refs/heads/master | 2021-06-11T16:14:12.635947 | 2021-05-04T08:18:35 | 2021-05-04T08:18:35 | 188,639,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,407 | py | from collections import Counter, defaultdict
class Combination:
def __init__(self, size, mod=10**9 + 7):
self.size = size + 2
self.mod = mod
self.fact = [1, 1] + [0] * size
self.factInv = [1, 1] + [0] * size
self.inv = [0, 1] + [0] * size
for i in range(2, self.size):
self.fact[i] = self.fact[i - 1] * i % self.mod
self.inv[i] = -self.inv[self.mod % i] * (self.mod // i) % self.mod
self.factInv[i] = self.factInv[i - 1] * self.inv[i] % self.mod
def npr(self, n, r):
if n < r or n < 0 or r < 0:
return 0
return self.fact[n] * self.factInv[n - r] % self.mod
def ncr(self, n, r):
if n < r or n < 0 or r < 0:
return 0
return self.fact[n] * (self.factInv[r] * self.factInv[n - r] % self.mod) % self.mod
def nhr(self, n, r): # 重複組合せ
return self.ncr(n + r - 1, n - 1)
def factN(self, n):
if n < 0:
return 0
return self.fact[n]
N = int(input())
A = list(map(int, input().split()))
MOD = 10**9 + 7
comb = Combination(N + 100)
cntA = Counter(A)
I = [a for a, c in cntA.items() if c == 2][0]
L, R = [i for i, a in enumerate(A) if a == I]
S = L + (N - R)
for i in range(1, N + 2):
if i == 1:
print(N)
continue
ans = comb.ncr(N + 1, i) - comb.ncr(S, i - 1)
print(ans % MOD)
| [
"y.oksaku@stu.kanazawa-u.ac.jp"
] | y.oksaku@stu.kanazawa-u.ac.jp |
5f38af46ffc3dc1c89cce266942afc556be35569 | e4713c248c857b06a3cb0e9d0d15dd5513b1a8e9 | /phonenumbers/shortdata/region_GW.py | 9cb488fa9b2cafd1bbdb0857102120a418808bf6 | [
"Apache-2.0",
"MIT"
] | permissive | igushev/fase_lib | 8f081e0f6b956b186dc759906b21dc3fc449f045 | 182c626193193b196041b18b9974b5b2cbf15c67 | refs/heads/master | 2023-05-14T14:35:05.727202 | 2022-04-15T23:55:37 | 2022-04-15T23:55:37 | 107,228,694 | 10 | 0 | MIT | 2023-05-01T19:38:09 | 2017-10-17T06:47:07 | Python | UTF-8 | Python | false | false | 658 | py | """Auto-generated file, do not edit by hand. GW metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_GW = PhoneMetadata(id='GW', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='1\\d{2}', possible_number_pattern='\\d{3}', possible_length=(3,)),
emergency=PhoneNumberDesc(national_number_pattern='11[378]', possible_number_pattern='\\d{3}', example_number='113', possible_length=(3,)),
short_code=PhoneNumberDesc(national_number_pattern='11[378]', possible_number_pattern='\\d{3}', example_number='113', possible_length=(3,)),
short_data=True)
| [
"igushev@gmail.com"
] | igushev@gmail.com |
ea521aba29bdd5c7f046695777ae845f38e51f2d | d190750d6cb34e9d86ae96724cf4b56a2f57a74a | /tests/r/test_pntsprd.py | 7ae98fd1fc926c5de4f2ca0f970609c7bc661f1b | [
"Apache-2.0"
] | permissive | ROAD2018/observations | a119f61a48213d791de0620804adb8d21c2ad9fb | 2c8b1ac31025938cb17762e540f2f592e302d5de | refs/heads/master | 2021-09-24T04:28:02.725245 | 2018-09-16T23:06:30 | 2018-09-16T23:06:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.pntsprd import pntsprd
def test_pntsprd():
"""Test module pntsprd.py by downloading
pntsprd.csv and testing shape of
extracted data has 553 rows and 12 columns
"""
test_path = tempfile.mkdtemp()
x_train, metadata = pntsprd(test_path)
try:
assert x_train.shape == (553, 12)
except:
shutil.rmtree(test_path)
raise()
| [
"dustinviettran@gmail.com"
] | dustinviettran@gmail.com |
7da564ce249013583e2e383589c22437cbb733a9 | ffe4c155e228f1d3bcb3ff35265bb727c684ec1a | /UCL/Algorithms/Exercises/set_intersection.py | f9c84d7732de0af46c37f51969b8b12cbb821533 | [] | no_license | yuuee-www/Python-Learning | 848407aba39970e7e0058a4adb09dd35818c1d54 | 2964c9144844aed576ea527acedf1a465e9a8664 | refs/heads/master | 2023-03-12T00:55:06.034328 | 2021-02-28T13:43:14 | 2021-02-28T13:43:14 | 339,406,816 | 0 | 0 | null | 2021-02-28T11:27:40 | 2021-02-16T13:26:46 | Jupyter Notebook | UTF-8 | Python | false | false | 250 | py | def set_intersection(a, b):
intersection = [i for i in a if i in b] #O(nm)
intersection = list(set(a).intersection(set(b)))
intersection = list(set(a) & set(b))
intersection = list(filter(lambda x:x in a, b))
return intersection | [
"50982416+cyndereN@users.noreply.github.com"
] | 50982416+cyndereN@users.noreply.github.com |
80052d57664b2a589bf542dd229c9c48edc61736 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/fv/asiteconnp.py | bd7efa2b92e54a40a0cde06c8751451bea5afafe | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 4,379 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ASiteConnP(Mo):
    """Abstract managed object for Intersite Anycast Connectivity Information
    (``fvASiteConnP``).

    NOTE(review): this is generator-produced model code ("do not modify" per
    the file header); only documentation has been added — every metadata value
    below is verbatim, including the duplicated ``moClassName`` assignment.
    """
    # Class-level metadata describing the MO to the cobra framework.
    meta = ClassMeta("cobra.model.fv.ASiteConnP")
    meta.isAbstract = True
    meta.moClassName = "fvASiteConnP"
    meta.moClassName = "fvASiteConnP"
    meta.rnFormat = ""
    meta.category = MoCategory.REGULAR
    meta.label = "Intersite Anycast Connectivity Information"
    meta.writeAccessMask = 0x1000001
    meta.readAccessMask = 0x1000001
    meta.isDomainable = False
    meta.isReadOnly = False
    meta.isConfigurable = True
    meta.isDeletable = True
    meta.isContextRoot = False
    # Containment / inheritance relationships.
    meta.childClasses.add("cobra.model.fault.Delegate")
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
    meta.superClasses.add("cobra.model.naming.NamedObject")
    meta.superClasses.add("cobra.model.pol.Obj")
    meta.superClasses.add("cobra.model.pol.Comp")
    meta.concreteSubClasses.add("cobra.model.fv.SiteConnP")
    meta.concreteSubClasses.add("cobra.model.fv.SiteConnPDef")
    meta.rnPrefixes = [
    ]
    # Property: bgpAsn — remote site BGP autonomous system number.
    prop = PropMeta("str", "bgpAsn", "bgpAsn", 33120, PropCategory.REGULAR)
    prop.label = "Remote site ASN"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(1, 4294967295)]
    meta.props.add("bgpAsn", prop)
    # Property: childAction — framework-managed child action flags.
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)
    # Property: descr — free-form description string.
    prop = PropMeta("str", "descr", "descr", 5582, PropCategory.REGULAR)
    prop.label = "Description"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("descr", prop)
    # Property: dn — distinguished name (implicit, create-only).
    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)
    # Property: id — site identifier (implicit).
    prop = PropMeta("str", "id", "id", 30384, PropCategory.REGULAR)
    prop.label = "Site ID"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("id", prop)
    # Property: name — object name.
    prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("name", prop)
    # Property: nameAlias — display alias for the name.
    prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
    prop.label = "Name alias"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 63)]
    prop.regex = ['[a-zA-Z0-9_.-]+']
    meta.props.add("nameAlias", prop)
    # Property: rn — relative name (implicit, create-only).
    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)
    # Property: status — MO lifecycle status flags.
    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        """Create the MO under *parentMoOrDn*; this class has no naming props."""
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"rrishike@cisco.com"
] | rrishike@cisco.com |
07eda087b3ad3cee49e5a848747b2633c52ac698 | abc4a73e5f93ebf90be946b95ef215e32c823353 | /colour/colorimetry/datasets/light_sources/chromaticity_coordinates.py | 0c97510b7bd72f3c75c1c667027914082671ea87 | [
"BSD-3-Clause"
] | permissive | OmarWagih1/colour | 69f5108e83ec443551c5593c066bcd4e3596060f | bdc880a2783ff523dafb19f1233212dd03a639bd | refs/heads/develop | 2021-04-14T20:30:29.635916 | 2020-07-26T05:46:00 | 2020-07-26T05:46:00 | 249,263,927 | 0 | 0 | BSD-3-Clause | 2020-03-22T20:11:06 | 2020-03-22T20:11:06 | null | UTF-8 | Python | false | false | 21,293 | py | # -*- coding: utf-8 -*-
"""
Light Source Chromaticity Coordinates
=====================================
Defines various light source chromaticity coordinates.
The following light sources are available:
- *RIT* *PointerData.xls* spreadsheet light sources: Natural,
Philips TL-84, T8 Luxline Plus White, SA, SC, T8 Polylux 3000,
T8 Polylux 4000, Thorn Kolor-rite
- *NIST* *NIST CQS simulation 7.4.xls* spreadsheet traditional light sources:
Cool White FL, Daylight FL, HPS, Incandescent, LPS, Mercury,
Metal Halide, Neodimium Incandescent, Super HPS, Triphosphor FL
- *NIST* *NIST CQS simulation 7.4.xls* spreadsheet LED light sources:
3-LED-1 (457/540/605), 3-LED-2 (473/545/616), 3-LED-2 Yellow,
3-LED-3 (465/546/614), 3-LED-4 (455/547/623), 4-LED No Yellow,
4-LED Yellow, 4-LED-1 (461/526/576/624), 4-LED-2 (447/512/573/627),
Luxeon WW 2880, PHOS-1, PHOS-2, PHOS-3, PHOS-4,
Phosphor LED YAG
- *NIST* *NIST CQS simulation 7.4.xls* spreadsheet Philips light sources:
60 A/W (Soft White), C100S54 (HPS), C100S54C (HPS),
F32T8/TL830 (Triphosphor), F32T8/TL835 (Triphosphor),
F32T8/TL841 (Triphosphor), F32T8/TL850 (Triphosphor),
F32T8/TL865 /PLUS (Triphosphor), F34/CW/RS/EW (Cool White FL),
F34T12/LW/RS /EW, F34T12WW/RS /EW (Warm White FL),
F40/C50 (Broadband FL), F40/C75 (Broadband FL),
F40/CWX (Broadband FL), F40/DX (Broadband FL), F40/DXTP (Delux FL),
F40/N (Natural FL), H38HT-100 (Mercury), H38JA-100/DX (Mercury DX),
MHC100/U/MP /3K, MHC100/U/MP /4K, SDW-T 100W/LV (Super HPS)
- Common light sources: Kinoton 75P
References
----------
- :cite:`Houston2015a` : Borer, T. (2017). Private Discussion with Mansencal,
T. and Shaw, N.
- :cite:`Ohno2008a` : Ohno, Yoshiro, & Davis, W. (2008). NIST CQS simulation
(Version 7.4) [Computer software].
https://drive.google.com/file/d/1PsuU6QjUJjCX6tQyCud6ul2Tbs8rYWW9/view?\
usp=sharing
- :cite:`Pointer1980a` : Pointer, M. R. (1980). Pointer's Gamut Data.
http://www.cis.rit.edu/research/mcsl2/online/PointerData.xls
"""
from __future__ import division, unicode_literals
import numpy as np
from colour.utilities import CaseInsensitiveMapping
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2020 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = [
'LIGHT_SOURCES_RIT_CIE_1931_2_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_RIT_CIE_1964_10_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1931_2_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1964_10_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_NIST_LED_CIE_1931_2_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_NIST_LED_CIE_1964_10_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_NIST_PHILIPS_CIE_1931_2_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_NIST_PHILIPS_CIE_1964_10_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_COMMON_CIE_1931_2_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES_COMMON_CIE_1964_10_DEGREE_STANDARD_OBSERVER',
'LIGHT_SOURCES'
]
LIGHT_SOURCES_RIT_CIE_1931_2_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'Natural':
np.array([0.381585730647787, 0.359224138274067]),
'Philips TL-84':
np.array([0.378413599970988, 0.379290254544090]),
'SA':
np.array([0.447573030734154, 0.407438137156467]),
'SC':
np.array([0.310056734303928, 0.316145704789204]),
'T8 Luxline Plus White':
np.array([0.410492204086250, 0.388932529676840]),
'T8 Polylux 3000':
np.array([0.431706082207185, 0.413877736072647]),
'T8 Polylux 4000':
np.array([0.379219473139794, 0.384469085577631]),
'Thorn Kolor-rite':
np.array([0.381919124282806, 0.374309261641251])
}))
"""
Light source chromaticity coordinates from *RIT* *PointerData.xls* spreadsheet
for *CIE 1931 2 Degree Standard Observer*.
Warning
-------
The chromaticity coordinates have been calculated from *PointerData.xls*
spreadsheet which doesn't mention the data source thus the light source names
cannot be accurately verified.
References
----------
:cite:`Pointer1980a`
LIGHT_SOURCES_RIT_CIE_1931_2_DEGREE_STANDARD_OBSERVER : CaseInsensitiveMapping
**{'Natural', 'Philips TL-84', 'T8 Luxline Plus White', 'SA', 'SC',
'T8 Polylux 3000', 'T8 Polylux 4000', 'Thorn Kolor-rite'}**
"""
LIGHT_SOURCES_RIT_CIE_1964_10_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'Natural':
np.array([0.384870991183035, 0.353869223366545]),
'Philips TL-84':
np.array([0.383592002892950, 0.373922741815762]),
'SA':
np.array([0.451176803594070, 0.405936046781591]),
'SC':
np.array([0.310388637415649, 0.319050651220986]),
'T8 Luxline Plus White':
np.array([0.416946978831203, 0.380991426462756]),
'T8 Polylux 3000':
np.array([0.439038926288670, 0.404554330124715]),
'T8 Polylux 4000':
np.array([0.385115161872875, 0.377800928395769]),
'Thorn Kolor-rite':
np.array([0.385533929282467, 0.370840492090948])
}))
"""
Light source chromaticity coordinates from *RIT* *PointerData.xls* spreadsheet
for *CIE 1964 10 Degree Standard Observer*. [1]_
LIGHT_SOURCES_RIT_CIE_1964_10_DEGREE_STANDARD_OBSERVER : CaseInsensitiveMapping
**{'Natural', 'Philips TL-84', 'T8 Luxline Plus White', 'SA', 'SC',
'T8 Polylux 3000', 'T8 Polylux 4000', 'Thorn Kolor-rite'}**
"""
LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1931_2_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'Cool White FL':
np.array([0.369256318971281, 0.372549878176631]),
'Daylight FL':
np.array([0.312662993963651, 0.331985688793009]),
'HPS':
np.array([0.521677696062816, 0.417971177117239]),
'Incandescent':
np.array([0.450730217519680, 0.408046128945005]),
'LPS':
np.array([0.575151311365165, 0.424232234924905]),
'Mercury':
np.array([0.392018457637112, 0.383777071984453]),
'Metal Halide':
np.array([0.372544558972793, 0.385603925927588]),
'Neodimium Incandescent':
np.array([0.447398697052100, 0.395008601248268]),
'Super HPS':
np.array([0.470061659271846, 0.406116584248741]),
'Triphosphor FL':
np.array([0.413163268257275, 0.396422053758680])
}))
"""
Traditional light source chromaticity coordinates from *NIST*
*NIST CQS simulation 7.4.xls* spreadsheet for the
*CIE 1931 2 Degree Standard Observer*.
References
----------
:cite:`Ohno2008a`
LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1931_2_DEGREE_STANDARD_OBSERVER :
CaseInsensitiveMapping
**{'Cool White FL', 'Daylight FL', 'HPS', 'Incandescent', 'LPS', 'Mercury',
'Metal Halide', 'Neodimium Incandescent', 'Super HPS', 'Triphosphor FL'}**
"""
LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1964_10_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'Cool White FL':
np.array([0.376715047518455, 0.364576802118673]),
'Daylight FL':
np.array([0.317395878738965, 0.330780819136676]),
'HPS':
np.array([0.531764495177513, 0.408752715284645]),
'Incandescent':
np.array([0.454365604973572, 0.406573684216774]),
'LPS':
np.array([0.589960045887891, 0.410039954112109]),
'Mercury':
np.array([0.401266412873755, 0.364732538221183]),
'Metal Halide':
np.array([0.378786167751226, 0.377496928504661]),
'Neodimium Incandescent':
np.array([0.447516717156694, 0.396734151368497]),
'Super HPS':
np.array([0.473859567146135, 0.401381825309197]),
'Triphosphor FL':
np.array([0.418591963931736, 0.388947713332192])
}))
"""
Traditional light source chromaticity coordinates from *NIST*
*NIST CQS simulation 7.4.xls* spreadsheet for the
*CIE 1964 10 Degree Standard Observer*. [2]_
LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1964_10_DEGREE_STANDARD_OBSERVER :
CaseInsensitiveMapping
**{'Cool White FL', 'Daylight FL', 'HPS', 'Incandescent', 'LPS', 'Mercury',
'Metal Halide', 'Neodimium Incandescent', 'Super HPS', 'Triphosphor FL'}**
"""
LIGHT_SOURCES_NIST_LED_CIE_1931_2_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'3-LED-1 (457/540/605)':
np.array([0.417057686949170, 0.396262457986602]),
'3-LED-2 (473/545/616)':
np.array([0.417060475566006, 0.396268120523418]),
'3-LED-2 Yellow':
np.array([0.436563079184047, 0.443649619298676]),
'3-LED-3 (465/546/614)':
np.array([0.380460502184482, 0.376772001481922]),
'3-LED-4 (455/547/623)':
np.array([0.417067943691045, 0.396276280071757]),
'4-LED No Yellow':
np.array([0.417060589301332, 0.396268153712350]),
'4-LED Yellow':
np.array([0.417069637940463, 0.396276766014859]),
'4-LED-1 (461/526/576/624)':
np.array([0.417067615440556, 0.396275056779587]),
'4-LED-2 (447/512/573/627)':
np.array([0.417071570560054, 0.396278745130373]),
'Luxeon WW 2880':
np.array([0.459088527920913, 0.432916480607903]),
'PHOS-1':
np.array([0.436443167801164, 0.404616033549917]),
'PHOS-2':
np.array([0.452704462198571, 0.437584543052711]),
'PHOS-3':
np.array([0.436899870751359, 0.404037372134463]),
'PHOS-4':
np.array([0.436936023906427, 0.404113558278629]),
'Phosphor LED YAG':
np.array([0.307761817314310, 0.325268939239941])
}))
"""
LED light source chromaticity coordinates from *NIST*
*NIST CQS simulation 7.4.xls* spreadsheet for the
*CIE 1931 2 Degree Standard Observer*. [2]_
LIGHT_SOURCES_NIST_LED_CIE_1931_2_DEGREE_STANDARD_OBSERVER :
**{'3-LED-1 (457/540/605)', '3-LED-2 (473/545/616)', '3-LED-2 Yellow',
'3-LED-3 (465/546/614)', '3-LED-4 (455/547/623)', '4-LED No Yellow',
'4-LED Yellow', '4-LED-1 (461/526/576/624)', '4-LED-2 (447/512/573/627)',
'Luxeon WW 2880', 'PHOS-1', 'PHOS-2', 'PHOS-3', 'PHOS-4',
'Phosphor LED YAG'}**
"""
LIGHT_SOURCES_NIST_LED_CIE_1964_10_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'3-LED-1 (457/540/605)':
np.array([0.425099988926548, 0.389451349911075]),
'3-LED-2 (473/545/616)':
np.array([0.422222118774217, 0.401298495594226]),
'3-LED-2 Yellow':
np.array([0.446222216139125, 0.441646464276087]),
'3-LED-3 (465/546/614)':
np.array([0.387470465801936, 0.376404716015666]),
'3-LED-4 (455/547/623)':
np.array([0.422865464107041, 0.388772240171637]),
'4-LED No Yellow':
np.array([0.419807532952439, 0.399465294930377]),
'4-LED Yellow':
np.array([0.422720601750053, 0.390284663473479]),
'4-LED-1 (461/526/576/624)':
np.array([0.423899783323037, 0.394170886226971]),
'4-LED-2 (447/512/573/627)':
np.array([0.421571042053867, 0.394089741928601]),
'Luxeon WW 2880':
np.array([0.466639299623263, 0.430817417218051]),
'PHOS-1':
np.array([0.440120001281140, 0.403135783393416]),
'PHOS-2':
np.array([0.461487398870558, 0.436150294667024]),
'PHOS-3':
np.array([0.440892655302172, 0.408662264402299]),
'PHOS-4':
np.array([0.441760443951475, 0.407267478268879]),
'Phosphor LED YAG':
np.array([0.312807834772696, 0.334180937864035])
}))
"""
LED light source chromaticity coordinates from *NIST*
*NIST CQS simulation 7.4.xls* spreadsheet for the
*CIE 1964 10 Degree Standard Observer*. [2]_
LIGHT_SOURCES_NIST_LED_CIE_1964_10_DEGREE_STANDARD_OBSERVER :
CaseInsensitiveMapping
**{'3-LED-1 (457/540/605)', '3-LED-2 (473/545/616)', '3-LED-2 Yellow',
'3-LED-3 (465/546/614)', '3-LED-4 (455/547/623)', '4-LED No Yellow',
'4-LED Yellow', '4-LED-1 (461/526/576/624)', '4-LED-2 (447/512/573/627)',
'Luxeon WW 2880', 'PHOS-1', 'PHOS-2', 'PHOS-3', 'PHOS-4',
'Phosphor LED YAG'}**
"""
LIGHT_SOURCES_NIST_PHILIPS_CIE_1931_2_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'60 A/W (Soft White)':
np.array([0.450730217519680, 0.408046128945005]),
'C100S54 (HPS)':
np.array([0.529231515407657, 0.411370164988427]),
'C100S54C (HPS)':
np.array([0.502380414374839, 0.415877299905475]),
'F32T8/TL830 (Triphosphor)':
np.array([0.443250764475753, 0.409523700296928]),
'F32T8/TL835 (Triphosphor)':
np.array([0.407150274569933, 0.393172743482571]),
'F32T8/TL841 (Triphosphor)':
np.array([0.385376686681605, 0.390370762102806]),
'F32T8/TL850 (Triphosphor)':
np.array([0.343768910392287, 0.358447436104108]),
'F32T8/TL865 /PLUS (Triphosphor)':
np.array([0.316368879615201, 0.345320790143017]),
'F34/CW/RS/EW (Cool White FL)':
np.array([0.377250931364378, 0.393087658636060]),
'F34T12/LW/RS /EW':
np.array([0.378863642993776, 0.394960629979820]),
'F34T12WW/RS /EW (Warm White FL)':
np.array([0.438466967656789, 0.408635441565706]),
'F40/C50 (Broadband FL)':
np.array([0.345836574973021, 0.361724450389430]),
'F40/C75 (Broadband FL)':
np.array([0.299966663385220, 0.316582165804824]),
'F40/CWX (Broadband FL)':
np.array([0.375037045754214, 0.360543952129462]),
'F40/DX (Broadband FL)':
np.array([0.311922310746537, 0.342802103417329]),
'F40/DXTP (Delux FL)':
np.array([0.313066543826958, 0.342225714484412]),
'F40/N (Natural FL)':
np.array([0.376878697365115, 0.354153458302878]),
'H38HT-100 (Mercury)':
np.array([0.311200590193641, 0.382944245857018]),
'H38JA-100/DX (Mercury DX)':
np.array([0.389791630360359, 0.373394688931767]),
'MHC100/U/MP /3K':
np.array([0.428581768670222, 0.388168915678330]),
'MHC100/U/MP /4K':
np.array([0.373145253482762, 0.371366990216717]),
'SDW-T 100W/LV (Super HPS)':
np.array([0.472339157938672, 0.407106330880316])
}))
"""
Philips light source chromaticity coordinates from *NIST*
*NIST CQS simulation 7.4.xls* spreadsheet for the
*CIE 1931 2 Degree Standard Observer*. [2]_
LIGHT_SOURCES_NIST_PHILIPS_CIE_1931_2_DEGREE_STANDARD_OBSERVER :
CaseInsensitiveMapping
**{'60 A/W (Soft White)', 'C100S54 (HPS)', 'C100S54C (HPS)',
'F32T8/TL830 (Triphosphor)', 'F32T8/TL835 (Triphosphor)',
'F32T8/TL841 (Triphosphor)', 'F32T8/TL850 (Triphosphor)',
'F32T8/TL865 /PLUS (Triphosphor)', 'F34/CW/RS/EW (Cool White FL)',
'F34T12/LW/RS /EW', 'F34T12WW/RS /EW (Warm White FL)',
'F40/C50 (Broadband FL)', 'F40/C75 (Broadband FL)',
'F40/CWX (Broadband FL)', 'F40/DX (Broadband FL)', 'F40/DXTP (Delux FL)',
'F40/N (Natural FL)', 'H38HT-100 (Mercury)', 'H38JA-100/DX (Mercury DX)',
'MHC100/U/MP /3K', 'MHC100/U/MP /4K', 'SDW-T 100W/LV (Super HPS)'}**
"""
LIGHT_SOURCES_NIST_PHILIPS_CIE_1964_10_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'60 A/W (Soft White)':
np.array([0.454365604973572, 0.406573684216774]),
'C100S54 (HPS)':
np.array([0.538554605063010, 0.402575827972962]),
'C100S54C (HPS)':
np.array([0.509663059970892, 0.409064508209193]),
'F32T8/TL830 (Triphosphor)':
np.array([0.448795219301811, 0.403574636091678]),
'F32T8/TL835 (Triphosphor)':
np.array([0.412082534290652, 0.388001071127592]),
'F32T8/TL841 (Triphosphor)':
np.array([0.390908619219527, 0.385290559992705]),
'F32T8/TL850 (Triphosphor)':
np.array([0.347882431257452, 0.355845742210551]),
'F32T8/TL865 /PLUS (Triphosphor)':
np.array([0.320698199593768, 0.343871441043854]),
'F34/CW/RS/EW (Cool White FL)':
np.array([0.386514853545337, 0.382843326097814]),
'F34T12/LW/RS /EW':
np.array([0.389628909159399, 0.382074721889904]),
'F34T12WW/RS /EW (Warm White FL)':
np.array([0.448395377616960, 0.395666643335296]),
'F40/C50 (Broadband FL)':
np.array([0.349880827196884, 0.360661316491439]),
'F40/C75 (Broadband FL)':
np.array([0.301988533872761, 0.318479025875818]),
'F40/CWX (Broadband FL)':
np.array([0.378502309910296, 0.356371890168937]),
'F40/DX (Broadband FL)':
np.array([0.316783037559153, 0.341749269085077]),
'F40/DXTP (Delux FL)':
np.array([0.318774745065791, 0.339798825605488]),
'F40/N (Natural FL)':
np.array([0.378833157741751, 0.350724402658646]),
'H38HT-100 (Mercury)':
np.array([0.326260627082484, 0.360001095895205]),
'H38JA-100/DX (Mercury DX)':
np.array([0.397058597517533, 0.356532431806974]),
'MHC100/U/MP /3K':
np.array([0.431422986591898, 0.380642213887539]),
'MHC100/U/MP /4K':
np.array([0.375707105948115, 0.366156465779779]),
'SDW-T 100W/LV (Super HPS)':
np.array([0.476461908192661, 0.402288012403575])
}))
"""
Philips light source chromaticity coordinates from *NIST*
*NIST CQS simulation 7.4.xls* spreadsheet for the
*CIE 1964 10 Degree Standard Observer*. [2]_
LIGHT_SOURCES_NIST_PHILIPS_CIE_1964_10_DEGREE_STANDARD_OBSERVER :
CaseInsensitiveMapping
**{'60 A/W (Soft White)', 'C100S54 (HPS)', 'C100S54C (HPS)',
'F32T8/TL830 (Triphosphor)', 'F32T8/TL835 (Triphosphor)',
'F32T8/TL841 (Triphosphor)', 'F32T8/TL850 (Triphosphor)',
'F32T8/TL865 /PLUS (Triphosphor)', 'F34/CW/RS/EW (Cool White FL)',
'F34T12/LW/RS /EW', 'F34T12WW/RS /EW (Warm White FL)',
'F40/C50 (Broadband FL)', 'F40/C75 (Broadband FL)',
'F40/CWX (Broadband FL)', 'F40/DX (Broadband FL)', 'F40/DXTP (Delux FL)',
'F40/N (Natural FL)', 'H38HT-100 (Mercury)', 'H38JA-100/DX (Mercury DX)',
'MHC100/U/MP /3K', 'MHC100/U/MP /4K', 'SDW-T 100W/LV (Super HPS)'}**
"""
LIGHT_SOURCES_COMMON_CIE_1931_2_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'Kinoton 75P': np.array([0.315252413629716, 0.332870794805328])
}))
"""
Common light source chromaticity coordinates for the
*CIE 1931 2 Degree Standard Observer*.
References
----------
:cite:`Houston2015a`
LIGHT_SOURCES_COMMON_CIE_1931_2_DEGREE_STANDARD_OBSERVER :
CaseInsensitiveMapping
**{'Kinoton 75P', }**
"""
LIGHT_SOURCES_COMMON_CIE_1964_10_DEGREE_STANDARD_OBSERVER = (
CaseInsensitiveMapping({
'Kinoton 75P': np.array([0.317086642148234, 0.336222428041514])
}))
"""
Common light source chromaticity coordinates for the
*CIE 1964 10 Degree Standard Observer*. [3_]
LIGHT_SOURCES_COMMON_CIE_1964_10_DEGREE_STANDARD_OBSERVER :
CaseInsensitiveMapping
**{'Kinoton 75P', }**
"""
LIGHT_SOURCES = CaseInsensitiveMapping({
'CIE 1931 2 Degree Standard Observer':
CaseInsensitiveMapping(
LIGHT_SOURCES_RIT_CIE_1931_2_DEGREE_STANDARD_OBSERVER),
'CIE 1964 10 Degree Standard Observer':
CaseInsensitiveMapping(
LIGHT_SOURCES_RIT_CIE_1964_10_DEGREE_STANDARD_OBSERVER)
})
LIGHT_SOURCES.__doc__ = """
Aggregated light source chromaticity coordinates.
LIGHT_SOURCES : CaseInsensitiveMapping
**{'CIE 1931 2 Degree Standard Observer',
'CIE 1964 10 Degree Standard Observer'}**
Aliases:
- 'cie_2_1931': 'CIE 1931 2 Degree Standard Observer'
- 'cie_10_1964': 'CIE 1964 10 Degree Standard Observer'
"""
LIGHT_SOURCES['cie_2_1931'] = (
LIGHT_SOURCES['CIE 1931 2 Degree Standard Observer'])
LIGHT_SOURCES['cie_10_1964'] = (
LIGHT_SOURCES['CIE 1964 10 Degree Standard Observer'])
LIGHT_SOURCES['CIE 1931 2 Degree Standard Observer'].update(
LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1931_2_DEGREE_STANDARD_OBSERVER)
LIGHT_SOURCES['CIE 1964 10 Degree Standard Observer'].update(
LIGHT_SOURCES_NIST_TRADITIONAL_CIE_1964_10_DEGREE_STANDARD_OBSERVER)
LIGHT_SOURCES['CIE 1931 2 Degree Standard Observer'].update(
LIGHT_SOURCES_NIST_LED_CIE_1931_2_DEGREE_STANDARD_OBSERVER)
LIGHT_SOURCES['CIE 1964 10 Degree Standard Observer'].update(
LIGHT_SOURCES_NIST_LED_CIE_1964_10_DEGREE_STANDARD_OBSERVER)
LIGHT_SOURCES['CIE 1931 2 Degree Standard Observer'].update(
LIGHT_SOURCES_NIST_PHILIPS_CIE_1931_2_DEGREE_STANDARD_OBSERVER)
LIGHT_SOURCES['CIE 1964 10 Degree Standard Observer'].update(
LIGHT_SOURCES_NIST_PHILIPS_CIE_1964_10_DEGREE_STANDARD_OBSERVER)
LIGHT_SOURCES['CIE 1931 2 Degree Standard Observer'].update(
LIGHT_SOURCES_COMMON_CIE_1931_2_DEGREE_STANDARD_OBSERVER)
LIGHT_SOURCES['CIE 1964 10 Degree Standard Observer'].update(
LIGHT_SOURCES_COMMON_CIE_1964_10_DEGREE_STANDARD_OBSERVER)
| [
"thomas.mansencal@gmail.com"
] | thomas.mansencal@gmail.com |
6989372758828a2c53b0c7642fc6846fe7f79be8 | 1abc7d220beb38893dd8427428bc27d07e4b5ec9 | /lncrawl/core/downloader.py | a6f130b76f207df11074c94eedaba8a2b1451432 | [
"Apache-2.0"
] | permissive | huangmhao/lightnovel-crawler | 53448ec85faef4ec708af854e226f7f5f45e15d5 | d94a9a69c330999f03b5cd0da609f126a7136104 | refs/heads/master | 2020-05-23T11:24:59.513225 | 2019-05-14T16:52:25 | 2019-05-14T16:52:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,338 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
To download chapter bodies
"""
import json
import logging
import os
import traceback
from concurrent import futures
from urllib.parse import urlparse
from progress.bar import IncrementalBar
logger = logging.getLogger('DOWNLOADER')
def downlod_cover(app):
    """Download the novel's cover image into ``app.output_path``.

    Sets ``app.book_cover`` to the saved file path on success, or ``None``
    when the crawler has no cover URL or the download fails (the cover is
    optional, so failures are only logged at debug level).
    """
    app.book_cover = None
    if app.crawler.novel_cover:
        logger.info('Getting cover image...')
        try:
            # Derive the extension from the URL path; fall back to png.
            ext = urlparse(app.crawler.novel_cover).path.split('.')[-1]
            filename = os.path.join(
                app.output_path, 'cover.%s' % (ext or 'png'))
            if not os.path.exists(filename):
                logger.info('Downloading cover image')
                response = app.crawler.get_response(app.crawler.novel_cover)
                with open(filename, 'wb') as f:
                    f.write(response.content)
                # end with
                logger.info('Saved cover: %s', filename)
            # end if
            app.book_cover = filename
        except Exception:
            # Best-effort: log the traceback and continue without a cover.
            logger.debug(traceback.format_exc())
        # end try
    # end if
    if not app.book_cover:
        # Fix: Logger.warn is a deprecated alias of Logger.warning.
        logger.warning('No cover image')
    # end if
# end def
def download_chapter_body(app, chapter):
    """Download (or restore from cache) the body of a single chapter.

    The chapter is cached as JSON under
    ``<output_path>/json[/Volume NN]/<id padded to 5>.json``; when the cache
    file exists and holds a non-empty body, it is reused instead of
    re-downloading.

    Returns an error-message string when the downloaded body is empty,
    otherwise ``None``. Mutates ``chapter['body']`` in place.
    """
    result = None
    dir_name = os.path.join(app.output_path, 'json')
    if app.pack_by_volume:
        vol_name = 'Volume ' + str(chapter['volume']).rjust(2, '0')
        dir_name = os.path.join(dir_name, vol_name)
    # end if
    os.makedirs(dir_name, exist_ok=True)
    chapter_name = str(chapter['id']).rjust(5, '0')
    file_name = os.path.join(dir_name, chapter_name + '.json')

    chapter['body'] = ''
    if os.path.exists(file_name):
        logger.info('Restoring from %s', file_name)
        # Fix: use an explicit encoding so cached files read identically on
        # every platform instead of depending on the locale default.
        with open(file_name, 'r', encoding='utf-8') as file:
            old_chapter = json.load(file)
            chapter['body'] = old_chapter['body']
        # end with
    # end if

    if len(chapter['body']) == 0:
        body = ''
        try:
            logger.info('Downloading to %s', file_name)
            body = app.crawler.download_chapter_body(chapter)
        except Exception:
            # Treat a crawler failure like an empty body; details at debug.
            logger.debug(traceback.format_exc())
        # end try

        if len(body) == 0:
            result = 'Body is empty: ' + chapter['url']
        else:
            chapter['body'] = '<h3>%s</h3><h1>%s</h1>\n%s' % (
                chapter['volume_title'], chapter['title'], body)
        # end if

        with open(file_name, 'w', encoding='utf-8') as file:
            file.write(json.dumps(chapter))
        # end with
    # end if

    return result
# end def
def download_chapters(app):
    """Fetch the cover and every chapter body, showing a progress bar."""
    downlod_cover(app)

    progress_bar = IncrementalBar('Downloading chapters', max=len(app.chapters))
    progress_bar.start()
    if os.getenv('debug_mode') == 'yes':
        progress_bar.next = lambda: None  # Hide in debug mode
    # end if

    # Submit one download task per chapter, keyed by the chapter id.
    pending = {}
    for chapter in app.chapters:
        task = app.crawler.executor.submit(download_chapter_body, app, chapter)
        pending[task] = str(chapter['id'])
    # end for

    app.progress = 0
    for task in futures.as_completed(pending):
        error = task.result()
        if error:
            progress_bar.clearln()
            logger.error(error)
        # end if
        app.progress += 1
        progress_bar.next()
    # end for
    progress_bar.finish()
# end def
| [
"dipu.sudipta@gmail.com"
] | dipu.sudipta@gmail.com |
3762dbc45fe80b9bda29d70e5924efe2586c0d09 | 2a0c2b3b682fdc7a49ff2ea107f53ac4b8fb5d20 | /pyLean/designMode/singletonPattern/SingleObject.py | 171a6a018d0ba94d002919f2289a12e5ce0a2e4a | [] | no_license | NoobsZero/DesignMode | 29f8327c09ecd8f26e9fc3c8618e5fba3de712b2 | 161997377020436491520a10fc3ac927469458f1 | refs/heads/master | 2023-07-08T04:24:59.776257 | 2021-08-17T05:55:46 | 2021-08-17T05:55:46 | 303,366,863 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,628 | py | # -*- encoding:utf-8 -*-
"""
@File :SingleObject.py
@Time :2020/10/12 15:29
@Author :Chen
@Software:PyCharm
单例模式(Singleton)---创建型
一、单例模式的使用场景
(1)、当创建一个对象所占用的资源很多,但同时又需要使用到该对象
(2)、当堆系统内的资源要求统一读写时,比如读写的配置信息,此时必须要求创建的实例信息相同
(3)、当有多个实例可能会引起程序错误时
总结:单例模式适用于只需要创建一个实例对象,程序全部使用同一个实例对象
二、实现方法
根据使用场景提炼出要点:
(1)、某个类只能有一个实例
(2)、必须要自行创建实例
(3)、必须向整个系统提供这个实例
实现方法:
(1)、只提供私有的构造方法
(2)、含有一个该类的静态私有对象
(3)、要提供一个静态的公用方法用于获取、创建私有对象
根据上面的描述,提供了俩种实现单例模式的方法分别为饿汉式和懒汉式
饿汉式:简单来说就是空间换时间,因为上来就实例化一个对象,占用了内存,(也不管你用还是不用)
懒汉式:简单的来说就是时间换空间,与饿汉式正好相反
"""
# 饿汉式
class EagerSingleton(object):
    """Singleton implemented by overriding ``__new__``.

    The shared instance is created on the first instantiation and every
    later call (and ``get_instance``) returns the same object.

    Bug fixed (review): the original guard used ``hasattr(cls, '_instance')``,
    which is always true because of the ``_instance = None`` class attribute,
    and the creation branch assigned ``cls.instance`` (missing underscore).
    As a result both ``EagerSingleton()`` and ``get_instance()`` always
    returned ``None``. The guard now tests ``_instance is None`` and assigns
    the correct attribute.
    """

    # Holds the one shared instance (None until first use).
    _instance = None

    def __new__(cls, *args, **kwargs):
        # Create the shared instance exactly once; afterwards always hand
        # back the cached object.
        if cls._instance is None:
            cls._instance = super(EagerSingleton, cls).__new__(cls)
        return cls._instance

    @classmethod
    def get_instance(cls):
        """Return the shared instance, creating it on first access."""
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance
# 懒汉式
class LazySingleton(object):
    """Lazy singleton: the shared instance is built only when
    ``get_instance`` is first called, never at definition time.

    Pros: no resources are spent until the instance is actually needed.
    Cons: not thread-safe as written -- two threads racing through
    ``get_instance`` may each construct an instance; a mutex would fix
    that at some cost in speed.
    """

    __instance = None

    def __init__(self):
        existing = self.__instance
        if existing:
            print('调用__init__,实例已经创建过了:', existing)
        else:
            print('调用__init__,实例未创建')

    @classmethod
    def get_instance(cls):
        # The singleton is only materialised on the first call.
        if not cls.__instance:
            cls.__instance = LazySingleton()
        return cls.__instance
"""
单例模式
由于Python语言中并没有构造函数私有化的手段,所以要使用另外的策略。
Python语言在构造新对象时要先调用__new__方法取得内存空间,然后调用__init__方法初始化该空间。
因此,在Python语言中为保证只生成一个实例,实现单例的方式就是重写__new__方法,通过这种方式实现的单例类Singleton。
"""
# Python中 _ 和 __ 的含义
# 在python的类中,没有真正的私有化,不管是方法还是属性,
# 为了编程的需要,约定加了下划线 _ 的属性和方法不属于API,不应该在类的外面访问,也不会被from M import * 导入。
# _*:类的私有属性或方法:不建议在类的外面直接调用这个属性或方法,但是也可以调用
# __*:python中的__和一项称为name mangling的技术有关,name mangling (又叫name decoration命名修饰).
# 在很多现代编程语言中,这一技术用来解决需要唯一名称而引起的问题,比如命名冲突/重载等.
# 单例类Singleton通过重写静态__new__方法来实现实例生成过程的控制。用户无论构建多少次该类的对象都会返回同一个结果。
if __name__ == '__main__':
    # Demo: constructing twice should print the same object id twice.
    # s3 = LazySingleton()
    # s1 = LazySingleton.get_instance()
    # s2 = LazySingleton.get_instance()
    # print(id(s1), id(s2))
    c1 = EagerSingleton()
    c2 = EagerSingleton()
    print(id(c1), id(c2))
| [
"870628995@qq.com"
] | 870628995@qq.com |
472834b4d7bf4d9680788d1d846bf3e5140eae51 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/225/users/3999/codes/1671_1105.py | 63959a8ddb44b8df9cb9991080829476eb2151b3 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py | # Teste seu código aos poucos. Não teste tudo no final, pois fica mais difícil de identificar erros.
# Ao testar sua solução, não se limite ao caso de exemplo. Teste as diversas possibilidades de saída
X=input()
print("Entrada:",X)
if(X=="lobo"):
Y="Stark"
elif(X=="leao"):
Y="Lannister"
elif(X=="veado"):
Y="Baratheon"
elif(X=="dragao"):
Y="Targaryen"
elif(X=="rosa"):
Y="Tyrell"
elif(X=="sol"):
Y="Martell"
elif(X=="lula"):
Y="Greyjoy"
elif(X=="esfolado"):
Y="Bolton"
elif(X=="turta"):
Y="Tully"
else:
Y="Brasao invalido"
if(Y!="Brasao invalido"):
print("Casa:",Y)
else:
print(Y)
| [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
8569eaedce6a31b10c11a8aefda9b425e5200758 | bfda3af75d94767a5cb265bd68c17cfbf94e3ee1 | /euler/problem067.py | 44e0d08738522bf8639fb9237bda997501d62b89 | [] | no_license | orenlivne/euler | d0e5b956a46eacfe423fbd6c52918beb91eea140 | 2afdd8bccdc5789c233e955b1ca626cea618eb9b | refs/heads/master | 2020-12-29T02:24:36.479708 | 2016-12-15T21:27:33 | 2016-12-15T21:27:33 | 20,263,482 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,089 | py | '''
============================================================
http://projecteuler.net/problem=67
By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from top to bottom is 23.
3
7 4
2 4 6
8 5 9 3
That is, 3 + 7 + 4 + 9 = 23.
Find the maximum total from top to bottom in triangle.txt (right click and 'Save Link/Target As...'), a 15K text file containing a triangle with one-hundred rows.
NOTE: This is a much more difficult version of Problem 18. It is not possible to try every route to solve this problem, as there are 2^99 altogether! If you could check one trillion (1012) routes every second it would take over twenty billion years to check them all. There is an efficient algorithm to solve it. ;o)
Created on Feb 21, 2013
@author: Oren Livne <livne@uchicago.edu>
============================================================
'''
import numpy as np, sys, urllib2
from numpy.ma.testutils import assert_equal
def read_triangle(f):
'''Read triangle data from input Stream. Stream rows.'''
return (np.array(map(int, line.split(' ')), dtype=np.int) for line in f)
def max_total(triangle):
    '''Max total of path in a triangle, top to bottom. In-place, saves one array.

    triangle: iterable of numpy int arrays, one per row (row k has k+1 cells).
    Keeps a single rolling row ``prev`` of best path sums (dynamic programming).
    NOTE(review): Python 2 code -- sys.maxint does not exist on Python 3.
    '''
    MIN_INT = -sys.maxint - 1
    for a in triangle: # Loop over rows, top to bottom
        if len(a) > 1: # Initial condition
            # Each cell takes the better of its two parents; MIN_INT pads the
            # edges so both shifted copies of prev align elementwise with a.
            a = np.maximum(np.concatenate(([MIN_INT], prev)), #@UndefinedVariable
                           np.concatenate((prev, [MIN_INT]))) + a # Recursion @UndefinedVariable
        prev = a
    return np.max(prev) # Final condition
if __name__ == "__main__":
    # Self-test: the known answer to Project Euler problem 67 is 7273.
    try:
        # If Internet connection available
        assert_equal(max_total(read_triangle(urllib2.urlopen('http://projecteuler.net/project/triangle.txt'))), 7273, 'Wrong sum')
    except urllib2.URLError:
        # If downloaded locally
        # NOTE(review): urllib2.urlopen does not accept a file mode argument;
        # plain open('problem067.dat', 'rb') was probably intended -- confirm.
        assert_equal(max_total(read_triangle(urllib2.urlopen('problem067.dat', 'rb'))), 7273, 'Wrong sum')
| [
"oren.livne@gmail.com"
] | oren.livne@gmail.com |
fd7d6c7e681e3fd3e2c9f35e3d6d751638a4838b | d08802e22fd87494dce79f811439a7a19b6df1da | /src/sniffMyPackets/transforms/pcapture.py | e47d07f0b90c9652ff40aa71cd716a6470232a1b | [] | no_license | hcit/sniffMyPackets | e6c9597b9e237f0d45ef3242ee82e09ee5785f0e | 391c1f4707946d3a456a7d9b3e639bda6d88dff8 | refs/heads/master | 2021-01-19T05:24:44.476422 | 2013-04-18T12:53:03 | 2013-04-18T12:53:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,386 | py | #!/usr/bin/env python
import logging, hashlib
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy.all import *
from time import time
from common.entities import Interface, pcapFile
#from canari.maltego.utils import debug, progress
from canari.framework import configure #, superuser
__author__ = 'catalyst256'
__copyright__ = 'Copyright 2013, Sniffmypackets Project'
__credits__ = []
__license__ = 'GPL'
__version__ = '0.1'
__maintainer__ = 'catalyst256'
__email__ = 'catalyst256@gmail.com'
__status__ = 'Development'
__all__ = [
'dotransform'
]
#@superuser
@configure(
    label='Sniff Packets [pcap]',
    description='Sniffs packets on interface and saves to file',
    uuids=[ 'sniffMyPackets.v2.interface2pcap' ],
    inputs=[ ( 'sniffMyPackets', Interface ) ],
    debug=True
)
def dotransform(request, response):
    """Sniff packets on the requested interface, save them to a
    timestamped pcap file under /tmp, and attach a pcapFile entity
    (tagged with the capture's SHA-1) to the Maltego response.
    """
    interface = request.value
    tstamp = int(time())
    fileName = '/tmp/'+str(tstamp)+'.pcap'
    # Packet count comes from the transform field when supplied,
    # otherwise default to 300 packets.
    if 'sniffMyPackets.count' in request.fields:
        pktcount = int(request.fields['sniffMyPackets.count'])
    else:
        pktcount = 300
    pkts = sniff(iface=interface, count=pktcount)
    wrpcap(fileName, pkts)
    # Hash the capture; 'with' guarantees the handle is closed
    # (the previous version leaked the open file handle).
    with open(fileName, 'rb') as fh:
        sha1hash = hashlib.sha1(fh.read()).hexdigest()
    e = pcapFile(fileName)
    e.sha1hash = sha1hash
    response += e
    return response
| [
"catalyst256@gmail.com"
] | catalyst256@gmail.com |
9eb2dbbb86324a998b932f17626c77a7c4c9a350 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/Clutter/Vertex.py | 5861047599940d3884e334b4a5c854d0dbff004d | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 6,063 | py | # encoding: utf-8
# module gi.repository.Clutter
# from /usr/lib64/girepository-1.0/Clutter-1.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.Atk as __gi_repository_Atk
import gi.repository.GObject as __gi_repository_GObject
import gobject as __gobject
class Vertex(__gi.Boxed):
    """
    A point in 3D space (float x, y, z), wrapping the ClutterVertex boxed type.

    :Constructors:
    ::
        Vertex()
        alloc() -> Clutter.Vertex
        new(x:float, y:float, z:float) -> Clutter.Vertex
    """
    # NOTE: auto-generated introspection stub. Bodies below are placeholders
    # for IDE assistance only; the real implementations live in the
    # Clutter-1.0 typelib.
    def alloc(self): # real signature unknown; restored from __doc__
        """ alloc() -> Clutter.Vertex """
        pass
    def copy(self): # real signature unknown; restored from __doc__
        """ copy(self) -> Clutter.Vertex """
        pass
    def equal(self, vertex_b): # real signature unknown; restored from __doc__
        """ equal(self, vertex_b:Clutter.Vertex) -> bool """
        return False
    def free(self): # real signature unknown; restored from __doc__
        """ free(self) """
        pass
    def init(self, x, y, z): # real signature unknown; restored from __doc__
        """ init(self, x:float, y:float, z:float) -> Clutter.Vertex """
        pass
    def new(self, x, y, z): # real signature unknown; restored from __doc__
        """ new(x:float, y:float, z:float) -> Clutter.Vertex """
        pass
    def _clear_boxed(self, *args, **kwargs): # real signature unknown
        pass
    def __delattr__(self, *args, **kwargs): # real signature unknown
        """ Implement delattr(self, name). """
        pass
    def __dir__(self, *args, **kwargs): # real signature unknown
        """ Default dir() implementation. """
        pass
    def __eq__(self, *args, **kwargs): # real signature unknown
        """ Return self==value. """
        pass
    def __format__(self, *args, **kwargs): # real signature unknown
        """ Default object formatter. """
        pass
    def __getattribute__(self, *args, **kwargs): # real signature unknown
        """ Return getattr(self, name). """
        pass
    def __ge__(self, *args, **kwargs): # real signature unknown
        """ Return self>=value. """
        pass
    def __gt__(self, *args, **kwargs): # real signature unknown
        """ Return self>value. """
        pass
    def __hash__(self, *args, **kwargs): # real signature unknown
        """ Return hash(self). """
        pass
    def __init_subclass__(self, *args, **kwargs): # real signature unknown
        """
        This method is called when a class is subclassed.
        The default implementation does nothing. It may be
        overridden to extend subclasses.
        """
        pass
    def __init__(self): # real signature unknown; restored from __doc__
        pass
    def __le__(self, *args, **kwargs): # real signature unknown
        """ Return self<=value. """
        pass
    def __lt__(self, *args, **kwargs): # real signature unknown
        """ Return self<value. """
        pass
    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass
    def __ne__(self, *args, **kwargs): # real signature unknown
        """ Return self!=value. """
        pass
    def __reduce_ex__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass
    def __reduce__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass
    def __repr__(self, *args, **kwargs): # real signature unknown
        """ Return repr(self). """
        pass
    def __setattr__(self, *args, **kwargs): # real signature unknown
        """ Implement setattr(self, name, value). """
        pass
    def __sizeof__(self, *args, **kwargs): # real signature unknown
        """ Size of object in memory, in bytes. """
        pass
    def __str__(self, *args, **kwargs): # real signature unknown
        """ Return str(self). """
        pass
    def __subclasshook__(self, *args, **kwargs): # real signature unknown
        """
        Abstract classes can override this to customize issubclass().
        This is invoked early on by abc.ABCMeta.__subclasscheck__().
        It should return True, False or NotImplemented. If it returns
        NotImplemented, the normal algorithm is used. Otherwise, it
        overrides the normal algorithm (and the outcome is cached).
        """
        pass
    def __weakref__(self, *args, **kwargs): # real signature unknown
        pass
    # x, y, z expose the vertex coordinates of the underlying boxed struct.
    x = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    y = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    z = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    __class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
    __dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(Vertex), '__module__': 'gi.repository.Clutter', '__gtype__': <GType ClutterVertex (94911696720576)>, '__dict__': <attribute '__dict__' of 'Vertex' objects>, '__weakref__': <attribute '__weakref__' of 'Vertex' objects>, '__doc__': None, 'x': <property object at 0x7f54134ef180>, 'y': <property object at 0x7f54134ef270>, 'z': <property object at 0x7f54134ef360>, 'alloc': gi.FunctionInfo(alloc), 'new': gi.FunctionInfo(new), 'copy': gi.FunctionInfo(copy), 'equal': gi.FunctionInfo(equal), 'free': gi.FunctionInfo(free), 'init': gi.FunctionInfo(init)})"
    __gtype__ = None # (!) real value is '<GType ClutterVertex (94911696720576)>'
    __info__ = StructInfo(Vertex)
| [
"ttys3@outlook.com"
] | ttys3@outlook.com |
fb3f9d1d1b7a4a7ee78c02e9baba723d81209eff | 3a9f2b3d79cf214704829427ee280f4b49dca70a | /saigon/rat/contrib/wlandemo/WGconfig_ref9.py | 8d69f03892e973bd16833e31f617a4aa5df80f80 | [] | no_license | jichunwei/MyGitHub-1 | ae0c1461fe0a337ef459da7c0d24d4cf8d4a4791 | f826fc89a030c6c4e08052d2d43af0b1b4b410e3 | refs/heads/master | 2021-01-21T10:19:22.900905 | 2016-08-20T03:34:52 | 2016-08-20T03:34:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,070 | py | import time
import logging
import csvUtil
import WGconfig_cfgLib_ref6 as BCLIB
import defaultWlanConfigParams_ref4 as DWCFG
from RuckusAutoTest.components.RemoteStationWinPC import RemoteStationWinPC as RPC
from RuckusAutoTest.common.SeleniumControl import SeleniumManager
from RuckusAutoTest.components.ZoneDirector import ZoneDirector
###
### Testbed data base
###
# provide 1st name of wlan-group
mytb = {'wgs_name': 'MultiWgsHelpQaWorkOnRuckus'}
# provide Guest & Hotspot Wlan names (uncomment to exercise guest/hotspot WLANs)
#mytb['wlans'] = ['guest-1','guest-2','guest-3','hotspot-2','hotspot-3']
mytb['wlans'] = ['DPSK']
# 6 wlans and encryption modes; please refer to defaultWlanConfigParams for details
#mytb['wlans'] = ['psk-wpa2-tkip-zeroIT-Dpsk-T','psk-wpa2-tkip-zeroIT-T','psk-wpa2-aes', 'psk-wpa2-tkip', 'share-wep128','open-wep128']
acl_conf = {'acl_name':'TestForFunL2ACL', 'description':'L2AccessControl', 'allowed_access': True, 'mac_list':['00:00:00:00:11:00', '00:22:43:13:39:30', '00:21:6A:2B:34:E2', '00:22:43:13:4B:2B']}
L3acl_conf = {'name':'TestForFunL3ACL', 'description':'L3L4AcessControl', 'default_mode':'allow-all', 'rules':[]}
L3acl_conf['rules'] = [{'description':'SNMP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SNMP'},
{'description':'HTTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'HTTP'},
{'description':'HTTPS-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'HTTPS'},
{'description':'FTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'FTP'},
{'description':'SSH-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SSH'},
{'description':'TELNET-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'TELNET'},
{'description':'SMTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SMTP'},
{'description':'DHCP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'DHCP'},
{'description':'DNS-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'DNS'},
{'description':'Any-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':''},
{'description':'protocol-ACLRuleNTP', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'protocol':'17', 'dst_port':'123'},
{'description':'protocol-ACLRuleTFTP', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'protocol':'17', 'dst_port':'69'},
{'description':'SNMP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SNMP'},
{'description':'HTTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'HTTP'},
{'description':'HTTPS-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'HTTPS'},
{'description':'FTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'FTP'},
{'description':'SSH-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SSH'},
{'description':'TELNET-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'TELNET'},
{'description':'SMTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SMTP'},
{'description':'DHCP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'DHCP'},
{'description':'DNS-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'DNS'},
{'description':'Any-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':''},
{'description':'protocol-ACLRuleNTP', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'protocol':'17', 'dst_port':'123'},
{'description':'protocol-ACLRuleTFTP', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'protocol':'17', 'dst_port':'69'},
{'description':'SNMP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SNMP'},
{'description':'HTTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'HTTP'},
{'description':'HTTPS-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'HTTPS'},
{'description':'FTP-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'FTP'},
{'description':'SSH-ACLRule', 'action':'Deny', 'dst_addr':'192.168.0.2/24', 'application':'SSH'}, ]
wispr_params = {'username':'jchu', 'password':'happy', 'redirect_url':'http://192.168.0.202', 'original_url':'http://192.168.0.210'}
guest_params = {'guest_pass':'', 'use_tou':True, 'redirect_url':'http://192.168.0.202'}
zeroit_params = {'eth_if_ip_addr':'192.168.1.12', 'ip_addr':'192.168.0.112',
'net_mask':'255.255.255.0', 'auth_method':'', 'use_radius':'', 'activate_url':'http://192.168.0.2/activate',
'username':'TestingisFunJob', 'password':'TestingisFunJob', 'ssid':'DPSK'}
def create_zd(conf):
    '''Instantiate and start a ZoneDirector session.

    conf overrides any of the defaults below (ip_addr, username, password,
    model, browser_type); extra keys are passed straight to ZoneDirector.
    Returns the started ZoneDirector object.
    '''
    cfg = dict(
        ip_addr = '192.168.0.2',
        username = 'admin',
        password = 'admin',
        model = 'zd',
        browser_type = 'firefox',
    )
    cfg.update(conf)
    logging.info("Starting up ZoneDirector [%s]" % cfg['ip_addr'])
    zd = ZoneDirector(cfg)
    zd.start()
    return zd
#Test client association w/ ZD in round robin fashion w/ different wlans detected by remote Clients
#This method can test whatever wlans are configured on ZD
def do_station1_test(**kwargs):
    '''Associate remote station 1 with every WLAN in mytb['wlans'] in turn.

    kwargs may override sta_ip_addr, sleep, repeat and debug. Guest and
    hotspot WLANs additionally run their web-auth step. Returns 0.
    '''
    fcfg = dict(sta_ip_addr = '192.168.1.10', sleep = 10, repeat = 1, debug = False)
    fcfg.update(kwargs)
    halt(fcfg['debug'])
    sta = RPC(dict(sta_ip_addr = fcfg['sta_ip_addr']))
    sta.do_cmd('get_current_status')
    sta.do_cmd('remove_all_wlan')
    for r in range(1, int(fcfg['repeat']) + 1):
        for wlan_id in mytb['wlans']:
            sta.do_cmd('check_ssid', {'ssid': wlan_id})
            wlan_conf = DWCFG.get_cfg(wlan_id)
            time.sleep(fcfg['sleep'])
            sta.get_current_status()
            # Retry association up to 10 times until a real IP is leased.
            for i in range(10):
                sta.cfg_wlan(wlan_conf)
                wlan_ip = sta.get_wifi_addresses()
                if wlan_ip[0] not in ['0.0.0.0', '']: break
                time.sleep(10)
            if wlan_id in ['guest-1', 'guest-2', 'guest-3']:
                for c in range(1, 10):
                    try:
                        sta.do_cmd("perform_guest_auth", str(guest_params))
                        time.sleep(5)
                    except:
                        pass
            if wlan_id in ['hotspot-1', 'hotspot-2', 'hotspot-3']:
                for c in range(1, 10):
                    try:
                        sta.do_cmd("perform_hotspot_auth", str(wispr_params))
                        time.sleep(5)
                    except:
                        pass
            #sta.ping(client_ip, timeout_ms=3000)
    sta.do_cmd('remove_all_wlan')
    return 0
# Test multiple users w/ ZeroIT configurations
def do_sta1_zeroit_test(zd, **kwargs):
    '''For every WLAN and every ZD user, provision station 1 through
    ZeroIT (activate URL) and verify it obtains an IP address. Returns 0.
    '''
    fcfg = dict(sta_ip_addr = '192.168.1.10', sleep = 10, repeat = 1, debug = False)
    fcfg.update(kwargs)
    halt(fcfg['debug'])
    sta = RPC(dict(sta_ip_addr = fcfg['sta_ip_addr']))
    zd_user = BCLIB.get_zd_user(zd)
    for r in range(1, int(fcfg['repeat']) + 1):
        for wlan_id in mytb['wlans']:
            sta.do_cmd('check_ssid', {'ssid': wlan_id})
            for user in zd_user:
                # ZeroIT credentials mirror the ZD user name.
                zeroit_params['username'] = user
                zeroit_params['password'] = user
                sta.do_cmd('get_current_status')
                sta.do_cmd('remove_all_wlan')
                sta.do_cmd("cfg_wlan_with_zero_it", str(zeroit_params))
                time.sleep(fcfg['sleep'])
                sta.get_current_status()
                # Poll up to 10 times for a real DHCP lease.
                for i in range(10):
                    wlan_ip = sta.get_wifi_addresses()
                    if wlan_ip[0] not in ['0.0.0.0', '']:
                        break
                    time.sleep(10)
                client_ip = wlan_ip[0]
                sta.ping(client_ip, timeout_ms = 3000)
                logging.info("ZeroIT User Name %s" % (zeroit_params['username']))
    sta.do_cmd('remove_all_wlan')
    return 0
# Convert batch_dpsk.csv file into dictionary and then send each dpsk to Client for association process
def do_sta1_batch_psk_test(**kwargs):
    '''Associate station 1 once per Dynamic-PSK row of the batch CSV,
    using each row's Passphrase as the WLAN key string. Returns 0.
    '''
    fcfg = dict(sta_ip_addr = '192.168.1.12', sleep = 10, repeat = 1, debug = False)
    fcfg.update(kwargs)
    halt(fcfg['debug'])
    sta = RPC(dict(sta_ip_addr = fcfg['sta_ip_addr']))
    sta.do_cmd('get_current_status')
    sta.do_cmd('remove_all_wlan')
    for r in range(1, int(fcfg['repeat']) + 1):
        for wlan_id in mytb['wlans']:
            sta.do_cmd('check_ssid', {'ssid': wlan_id})
            csvid = csvUtil.csv_as_list_dict("batch_dpsk_102409_16_06.csv")
            for raw in csvid:
                logging.info("User Name %s" % (raw['User Name']))
                wlan_conf = DWCFG.get_cfg(wlan_id)
                wlan_conf['key_string'] = raw['Passphrase']
                sta.cfg_wlan(wlan_conf)
                time.sleep(fcfg['sleep'])
                sta.get_current_status()
                # Poll up to 10 times for a real DHCP lease.
                for i in range(10):
                    wlan_ip = sta.get_wifi_addresses()
                    if wlan_ip[0] not in ['0.0.0.0', '']:
                        break
                    time.sleep(10)
                wlan_ip = sta.get_wifi_addresses()
                clientIp = wlan_ip[0]
                sta.ping(clientIp, timeout_ms = 3000)
    sta.do_cmd('remove_all_wlan')
    return 0
def do_station2_test(**kwargs):
    '''Single-pass association check for remote station 2 across all
    configured WLANs (no retry loop, no guest/hotspot auth). Returns 0.
    '''
    fcfg = dict(sta_ip_addr = '192.168.2.12', sleep = 10, repeat = 1, debug = False)
    fcfg.update(kwargs)
    halt(fcfg['debug'])
    sta = RPC(dict(sta_ip_addr = fcfg['sta_ip_addr']))
    sta.do_cmd('get_current_status')
    sta.do_cmd('remove_all_wlan')
    for r in range(1, int(fcfg['repeat']) + 1):
        for wlan_id in mytb['wlans']:
            sta.do_cmd('check_ssid', {'ssid': wlan_id})
            wlan_conf = DWCFG.get_cfg(wlan_id)
            sta.cfg_wlan(wlan_conf)
            time.sleep(fcfg['sleep'])
            sta.get_current_status()
            wlan_ip = sta.get_wifi_addresses()
            client_ip = wlan_ip[0]
            sta.ping(client_ip, timeout_ms = 3000)
    sta.do_cmd('remove_all_wlan')
    return 0
def halt(debug = False):
    '''Drop into the pdb debugger when debug is truthy; otherwise a no-op.'''
    if not debug:
        return
    import pdb
    pdb.set_trace()
def do_cfg(**kwargs):
    '''Start a ZoneDirector session and (optionally) provision it.

    The commented BCLIB calls below are manual toggles for the various
    provisioning steps (ACLs, users, WLANs, WLAN groups). Returns the
    ZoneDirector object; skips all provisioning when do_config is False.
    '''
    fcfg = dict(debug = False, do_config = True, username = 'admin', password = 'admin')
    fcfg.update(kwargs)
    halt(fcfg['debug'])
    zd = create_zd(fcfg)
    if not fcfg['do_config']:
        return zd
    #try:
    #    BCLIB.remove_wlan_config(zd)
    #except:
    #    pass
    #BCLIB.create_l2_acl_policy(zd, acl_conf, num_of_acl=1)
    # Add ACL Clone function to speed up Max. ACL configurations
    #BCLIB.create_l2_acl_clone(zd, acl_conf, num_of_acl=30)
    #BCLIB.create_l3_acl_policy(zd, L3acl_conf, num_of_acl=3)
    #BCLIB.create_l3_acl_clone(zd, L3acl_conf, num_of_acl=31)
    #BCLIB.create_users(zd, username='TestingisFunJob', password='TestingisFunJob', fullname='TestingisFunJob', number_of_users=10)
    #BCLIB.create_wlans(zd, mytb)
    #BCLIB.create_wlan_group(zd, mytb)
    #BCLIB.create_multi_wlan_groups(zd, mytb)
    #BCLIB.align_wlan_group_sn_wlan(zd, mytb)
    return zd
# configure every AP on ZD w/ different WlanGroup
def do_test(zd, **kwargs):
    '''Cycle every AP through every WLAN group on the ZD, updating each
    AP's description on every pass. Returns 0.
    '''
    fcfg = dict(debug = False, sleep = 3, repeat = 1)
    fcfg.update(kwargs)
    halt(fcfg['debug'])
    wgs_list = BCLIB.get_wlan_groups_list(zd)
    ap_xs_list = BCLIB.get_ap_xs_info(zd)
    for p in range(1, int(fcfg['repeat']) + 1):
        for wgs_name in wgs_list:
            desc = "Update %03d wgs=%s" % (p, wgs_name)
            for ap_xs0 in ap_xs_list:
                ap_xs1 = BCLIB.update_ap_xs_info_ch(zd, ap_xs0, desc, wgs_name)
                time.sleep(fcfg['sleep'])
        #wgs_name = 'Default'
        #desc = "Update %03d wgs=%s" % (p, wgs_name)
        #for ap_xs0 in ap_xs_list:
        #    ap_xs1 = BCLIB.update_ap_xs_info(zd, ap_xs0, desc, wgs_name)
        #    time.sleep(fcfg['sleep'])
    return 0
def do_cleanup(zd, debug = False):
    '''Remove all WLAN configuration from the ZoneDirector, dropping into
    pdb first when debug is True.'''
    halt(debug)
    BCLIB.remove_wlan_config(zd)
# Usage:
#
# tea.py contrib.wlandemo.WGconfig_ref repeat=1 do_cleanup=False
# tea.py bugs.bug6099 repeat=50 debug=True
# tea.py bugs.bug6099 ip_addr=192.168.2.2
#
def main(**kwargs):
    '''tea.py entry point: configure the ZD, run the ZeroIT station test
    and report ('PASS', mytb) or ('FAIL', mytb, {'error': ...}). The
    Selenium manager is always shut down before returning.
    '''
    fcfg = dict(debug = False, repeat = 1, sleep = 5, do_config = True, do_cleanup = False)
    fcfg.update(kwargs)
    sm = SeleniumManager()
    fcfg.update({'selenium_mgr': sm})
    zd = do_cfg(**fcfg)
    try:
        #do_test(zd,**kwargs)
        #while True:
        #do_station1_test(**kwargs)
        #do_station2_test(**kwargs)
        #do_sta1_batch_psk_test(**kwargs)
        do_sta1_zeroit_test(zd, **kwargs)
        if fcfg['do_cleanup']:
            do_cleanup(zd, debug = fcfg['debug'])
    except Exception, ex:
        sm.shutdown()
        return ('FAIL', mytb, {'error': ex.message})
    sm.shutdown()
    return ('PASS', mytb)
| [
"tan@xx.com"
] | tan@xx.com |
7761132b27a0670dcab85da3b9ff48acc7fbbf81 | c06cab774533cbbc3fdcd27b0e7ce825aa4984cf | /scripts/motif_training/ex_fimo.py | b21ca2bcca7d85b455b31f2c81747e238601f714 | [] | no_license | cbragdon93/compbio_project2016 | 4dbaf16a9d11d1216f7063e247bbf74e1c9b1aca | cc72fb3252b940b9fbcd92bf6839c388cbe28106 | refs/heads/master | 2021-06-08T21:12:13.819895 | 2016-12-02T21:57:36 | 2016-12-02T21:57:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | """Script to run fimo on the entire genome/sequences"""
from subprocess import call
from itertools import chain
def run_fimo(meme_path, motif_file, sequence_file, op_folder, options):
    """Assemble and run a fimo command line.

    Layout: <meme_path>./fimo <option flags/values> --oc <op_folder>
    <motif_file> <sequence_file>. The command is echoed before running.
    """
    cmd_parts = [meme_path + './fimo']
    for flag, value in options.items():
        cmd_parts.extend([flag, value])
    cmd_parts.extend(['--oc', op_folder])
    cmd_parts.extend([motif_file, sequence_file])
    shell_command = ' '.join(cmd_parts)
    print(shell_command)
    call(shell_command, shell=True)
    return None
def ex_fimo(meme_path, motif_file, sequence_file, op_folder, motif_id, thresh=0.0001):
    """Run fimo for one motif id, creating the output's parent folder first."""
    fimo_options = {"--thresh": str(thresh), "--verbosity": str(1), "--motif": motif_id}
    parent_dir = '/'.join(op_folder.split('/')[:-1])
    try:
        call('mkdir ' + parent_dir, shell=True)
    except:
        # Best effort: the folder may already exist.
        pass
    run_fimo(meme_path, motif_file, sequence_file, op_folder, fimo_options)
| [
"k.dileep1994@gmail.com"
] | k.dileep1994@gmail.com |
e1416db5c7ebb0645e06c66131b32c7ad64a6c08 | ddb3cae0628dc37cac63ebc1b6ebedea58c7fc8f | /0x05-python-exceptions/1-safe_print_integer.py | f090b0a5fa4d22922d7f499da6905e8454639614 | [] | no_license | ikki2530/holbertonschool-higher_level_programming | 1df42d49d7b4cf985d53bd5d2a1e55a9b90a195c | f0649aae6f2d96ea7e7c9a6e2e05a96177abb40e | refs/heads/master | 2022-12-20T11:13:29.263591 | 2020-09-26T14:20:02 | 2020-09-26T14:20:02 | 259,413,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | #!/usr/bin/python3
def safe_print_integer(value):
    """Print value as a decimal integer using "{:d}".format().

    Returns True when the value was printed, False otherwise. Uses EAFP:
    the format call itself rejects non-integers (ValueError) and
    unformattable objects (TypeError), which also generalizes the check
    to any int-like type instead of only exact isinstance(value, int).
    """
    try:
        print("{:d}".format(value))
        return True
    except (TypeError, ValueError):
        return False
| [
"dagomez2530@gmail.com"
] | dagomez2530@gmail.com |
3b6e37d389894b053906c58a8a0628422c2ee636 | 994c1b533fe64265715231d6458a9f316ce82cbf | /posts/migrations/0004_auto_20200723_0024.py | 0fe68c0368a1f6983003954c66d29b11a52904fc | [] | no_license | zerobubus/hw05_final | 07320a8fcc1119f28cf3bd5e3285d381470f7f47 | ed89bdd5fa088c71bdb127a780fc2eac3bcda47c | refs/heads/master | 2022-11-24T23:09:29.335507 | 2020-07-29T16:49:10 | 2020-07-29T16:49:10 | 282,685,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,370 | py | # Generated by Django 2.2.9 on 2020-07-23 00:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adjusts field options (verbose names,
    max lengths, related_name and on_delete behaviour) on Group and Post."""
    dependencies = [
        ('posts', '0003_auto_20200702_1838'),
    ]
    operations = [
        # Group: shorter description with a Russian verbose name.
        migrations.AlterField(
            model_name='group',
            name='description',
            field=models.TextField(max_length=100, verbose_name='описание'),
        ),
        migrations.AlterField(
            model_name='group',
            name='title',
            field=models.CharField(max_length=200, verbose_name='название'),
        ),
        # Post.author: deleting the user cascades to their posts.
        migrations.AlterField(
            model_name='post',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='authors', to=settings.AUTH_USER_MODEL, verbose_name='автор'),
        ),
        # Post.group: optional; deleting the group keeps posts (SET_NULL).
        migrations.AlterField(
            model_name='post',
            name='group',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='posts', to='posts.Group', verbose_name='группа'),
        ),
        migrations.AlterField(
            model_name='post',
            name='text',
            field=models.TextField(verbose_name='текст'),
        ),
    ]
| [
"spb.etr@yandex.ru"
] | spb.etr@yandex.ru |
fb12c9895eecf78e4d6b1944e5c87b2610b09ca2 | 1bb6c157928441a7103ee42836c44657774049fe | /app1/utils/image_url.py | 05a2cfc9be6ca055526a25b85a14eac8b366fe88 | [
"MIT"
] | permissive | xieyu-aa/news | 525158dfdbee5fb5fc00a5a8d085f3ba61d3ec5e | 3c74fe43690fbfd7271a3f58c706ac722ecad619 | refs/heads/main | 2023-02-03T10:15:16.226983 | 2020-12-28T08:47:06 | 2020-12-28T08:47:06 | 319,926,795 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | from qiniu import Auth, put_data
# Fill in your Qiniu Access Key and Secret Key here.
# NOTE(review): credentials are hard-coded in source; move them to
# environment variables or a config file before publishing this code.
access_key = 'QdYihoHdNftdfFtSdMr7ezeJb781HIh_FR-vxFdU'
secret_key = 'nLqJN9N3YR2NEx-Ngev3XMERt696ttpqA7SeM0lZ'
def image_url(image_data):
    """Upload raw image bytes to Qiniu and return the stored object key.

    Returns None when the upload does not finish with HTTP 200.
    """
    # Build the authentication object.
    auth = Auth(access_key, secret_key)
    # Target bucket to upload into.
    bucket_name = 'new3333'
    # None lets Qiniu pick the saved file name.
    key = None
    # Upload token valid for 3600 seconds.
    token = auth.upload_token(bucket_name, key, 3600)
    ret, info = put_data(token, key, image_data)
    print(ret)
    print(info)
    if info.status_code != 200:
        return None
    return ret.get('key')
if __name__ == '__main__':
    # Manual test: read a local sample image and upload it.
    with open('./滑稽.jpg', 'rb') as f:
        image_data = f.read()
    image_url(image_data)
| [
"you@example.com"
] | you@example.com |
8a3e125e2cdcc5e0e1da6a1bc5c0231997aa01c6 | de77d2ebb336a32149bd8a9a3d4d50018f264c3b | /melange/examples/doc_examples/tutorial/consume.py | 70e4190df7c578428a85bd102cf7bc3ba32d7cce | [
"MIT"
] | permissive | Rydra/melange | 34c87b3f9dc726a6463ffa9f886dd243193d9a9b | a313a956b3442d62887b8a5ec35fcc6cd5d1eca2 | refs/heads/main | 2023-02-04T20:40:54.551991 | 2023-01-29T18:39:09 | 2023-01-29T18:39:09 | 101,487,734 | 10 | 3 | MIT | 2022-05-21T20:53:46 | 2017-08-26T13:33:26 | Python | UTF-8 | Python | false | false | 1,017 | py | from simple_cqrs.domain_event import DomainEvent
from melange import SimpleMessageDispatcher, SingleDispatchConsumer, consumer
from melange.backends import LocalSQSBackend
from melange.examples.doc_examples.tutorial.publish import MyTestMessage
from melange.serializers import PickleSerializer, SerializerRegistry
class MyConsumer(SingleDispatchConsumer):
    """Melange consumer whose @consumer-decorated handler receives
    MyTestMessage events and echoes their text to stdout."""
    @consumer
    def on_my_test_message_received(self, event: MyTestMessage) -> None:
        # Side effect only: print the payload text.
        print(event.message)
if __name__ == "__main__":
    # Bind DomainEvent (and subclasses) to the pickle serializer.
    serializer_settings = {
        "serializers": {"pickle": PickleSerializer},
        "serializer_bindings": {DomainEvent: "pickle"},
    }
    serializer_registry = SerializerRegistry(serializer_settings)
    # Local SQS-compatible backend (e.g. elasticmq) on port 9324.
    backend = LocalSQSBackend(host="localhost", port=9324)
    consumer = MyConsumer()
    message_dispatcher = SimpleMessageDispatcher(
        consumer,
        serializer_registry,
        backend=backend,
    )
    print("Consuming...")
    # Blocks forever, pulling messages from the tutorial queue.
    message_dispatcher.consume_loop("melangetutorial-queue")
| [
"davigetto@gmail.com"
] | davigetto@gmail.com |
b1ccd66c449e978e898770fd5bab213e3f2eb270 | af8c90f4a1387218257f80fb33de8845dec8ed9b | /venv/bin/flask | f9930785b6de633ecb3c954007c87cb2be7b14a2 | [] | no_license | CorneliaKelinske/microblog | 96f663e1cd00c9b303e7630ab472579d1a3e24a0 | b21a398868dce6f61967f3b834337a197fe6f05a | refs/heads/master | 2023-02-11T00:42:28.666723 | 2021-01-01T14:59:45 | 2021-01-01T14:59:45 | 284,807,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | #!/home/corneliakelinske/connie_codes/microblog/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
    # Strip setuptools' "-script.pyw"/".exe" wrapper suffix from argv[0]
    # so the CLI reports a clean program name, then run the Flask CLI.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"corneliakelinske@gmail.com"
] | corneliakelinske@gmail.com | |
e3508f2b0525429d3b32209655d8a032b09963e0 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2521/60765/284939.py | ac29a1ddd9459bb309375c78dd57491cf7eaf91b | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 811 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import math
import sys
import re
import collections
def solve():
    """Read a bracketed, comma-separated list of ints from stdin and print
    a reordering that greedily interleaves the two most frequent remaining
    values."""
    values = list(map(int, input()[1:-1].split(',')))
    freq = collections.Counter(values)
    remaining = [[value, count] for value, count in freq.items()]
    remaining.sort(reverse=True, key=lambda pair: pair[1])
    ordering = []
    while len(remaining) > 1:
        # Emit one element from each of the two largest buckets.
        ordering.append(remaining[0][0])
        remaining[0][1] -= 1
        ordering.append(remaining[1][0])
        remaining[1][1] -= 1
        # Drop exhausted buckets (index 1 first, so index 0 stays valid).
        if remaining[1][1] == 0:
            remaining.pop(1)
        if remaining[0][1] == 0:
            remaining.pop(0)
        remaining.sort(reverse=True, key=lambda pair: pair[1])
    if remaining:
        ordering.append(remaining[0][0])
    print(ordering)
solve() | [
"1069583789@qq.com"
] | 1069583789@qq.com |
f97ebb63ac535601bd997690464c36249a17b102 | c3649aec8b628cf39f30c9440423ecbb8a9bc3aa | /src/bentoml/_internal/configuration/helpers.py | 24a1c6343fc793edd731cdecc837867a2e4bee88 | [
"Apache-2.0"
] | permissive | parano/BentoML | 2488ad1baa3f948f925edbe6b0eb2ea458bdad17 | eaa6218eb805acd6016eb140a4e3a9d6818dd995 | refs/heads/main | 2023-07-07T06:34:41.571577 | 2023-03-14T08:07:32 | 2023-03-14T08:07:32 | 178,978,356 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,657 | py | from __future__ import annotations
import os
import socket
import typing as t
import logging
from typing import TYPE_CHECKING
from functools import singledispatch
import yaml
import schema as s
from ..utils import LazyLoader
from ...exceptions import BentoMLConfigException
if TYPE_CHECKING:
    from types import ModuleType
# Module-level logger used for the deprecation warnings emitted below.
logger = logging.getLogger(__name__)
# Tracing exporter types accepted by the configuration validators.
TRACING_TYPE = ["zipkin", "jaeger", "otlp", "in_memory"]
def import_configuration_spec(version: int) -> ModuleType:  # pragma: no cover
    """Lazily import ``bentoml._internal.configuration.v<version>``.

    The LazyLoader defers the import until first attribute access and
    raises with ``exc_msg`` if that configuration version does not exist.
    """
    return LazyLoader(
        f"v{version}",
        globals(),
        f"bentoml._internal.configuration.v{version}",
        exc_msg=f"Configuration version {version} does not exist.",
    )
@singledispatch
def depth(_: t.Any, _level: int = 0):
    """Nesting depth of a value: scalars are 0, each dict level adds one."""
    return _level


@depth.register(dict)
def _(d: dict[str, t.Any], level: int = 0, **kw: t.Any):
    # One level deeper than the deepest value; an empty dict raises
    # ValueError (max() of an empty sequence).
    child_depths = [depth(value, level + 1, **kw) for value in d.values()]
    return max(child_depths)
def rename_fields(
    d: dict[str, t.Any],
    current: str,
    replace_with: str | None = None,
    *,
    remove_only: bool = False,
):
    """Rename (or drop, when remove_only=True) a deprecated key in an
    already-flattened dict, warning about the deprecation.

    Raises ValueError when the dict is not flattened (depth != 1);
    missing keys are ignored.
    """
    if depth(d) != 1:
        raise ValueError(
            "Given dictionary is not flattened. Use flatten_dict first."
        ) from None
    if current not in d:
        return
    if remove_only:
        logger.warning("Field '%s' is deprecated and will be removed." % current)
        del d[current]
        return
    assert replace_with, "'replace_with' must be provided."
    logger.warning(
        "Field '%s' is deprecated and has been renamed to '%s'"
        % (current, replace_with)
    )
    d[replace_with] = d.pop(current)
# Characters that force a key to be quoted in the flattened output.
punctuation = r"""!"#$%&'()*+,-./:;<=>?@[\]^`{|}~"""


def flatten_dict(
    d: t.MutableMapping[str, t.Any],
    parent: str = "",
    sep: str = ".",
) -> t.Generator[tuple[str, t.Any], None, None]:
    """Flatten nested dictionary into a single level dictionary.

    Keys containing punctuation are wrapped in double quotes; nested
    keys are joined with *sep*.
    """
    for key, value in d.items():
        if any(ch in punctuation for ch in key):
            key = f'"{key}"'
        full_key = parent + sep + key if parent else key
        if isinstance(value, t.MutableMapping):
            yield from flatten_dict(
                t.cast(t.MutableMapping[str, t.Any], value), parent=full_key, sep=sep
            )
        else:
            yield full_key, value
def load_config_file(path: str) -> dict[str, t.Any]:
    """Load configuration from given path.

    Parses the file as YAML; raises BentoMLConfigException when the
    file does not exist.
    """
    if not os.path.exists(path):
        raise BentoMLConfigException(
            "Configuration file %s not found." % path
        ) from None
    with open(path, "rb") as config_file:
        return yaml.safe_load(config_file)
def get_default_config(version: int) -> dict[str, t.Any]:
    """Load the bundled default configuration for *version* and validate
    it against that version's SCHEMA.

    Raises BentoMLConfigException when the file does not conform.
    """
    default_path = os.path.join(
        os.path.dirname(__file__), f"v{version}", "default_configuration.yaml"
    )
    config = load_config_file(default_path)
    spec = import_configuration_spec(version)
    assert hasattr(spec, "SCHEMA"), (
        "version %d does not have a validation schema" % version
    )
    try:
        spec.SCHEMA.validate(config)
    except s.SchemaError as e:
        raise BentoMLConfigException(
            "Default configuration for version %d does not conform to given schema:\n%s"
            % (version, e)
        ) from None
    return config
def validate_tracing_type(tracing_type: str) -> bool:
    """Return True when *tracing_type* is one of the entries in TRACING_TYPE."""
    return tracing_type in TRACING_TYPE
def validate_otlp_protocol(protocol: str) -> bool:
    """Return True for the supported OTLP transport protocols."""
    return protocol in {"grpc", "http"}
def ensure_larger_than(target: int | float) -> t.Callable[[int | float], bool]:
    """Return a predicate that is True for values strictly greater than *target*."""

    def validator(value: int | float) -> bool:
        return value > target

    return validator


# Common case: value must be strictly positive.
ensure_larger_than_zero = ensure_larger_than(0)
def ensure_range(
    lower: int | float, upper: int | float
) -> t.Callable[[int | float], bool]:
    """Build a validator accepting values inside the closed interval [lower, upper]."""

    def _check(value: int | float) -> bool:
        return lower <= value <= upper

    return _check
def ensure_iterable_type(typ_: type) -> t.Callable[[t.MutableSequence[t.Any]], bool]:
    """Build a validator that checks every element of a sequence is a *typ_* instance."""

    def _check(value: t.MutableSequence[t.Any]) -> bool:
        for item in value:
            if not isinstance(item, typ_):
                return False
        return True

    return _check
def is_valid_ip_address(addr: str) -> bool:
    """Return True if *addr* is accepted by ``socket.inet_aton`` as IPv4.

    Note: ``inet_aton`` also accepts legacy short forms such as "127.1".
    """
    try:
        socket.inet_aton(addr)
    except socket.error:
        return False
    return True
| [
"noreply@github.com"
] | parano.noreply@github.com |
c90cb2c8edee52cb8802a752aa9af21056bc7d27 | b7be7c13ccfee7bf0e06e391f1d71c75276810e0 | /posts/api_urls.py | 87bdf991d33ce3f5cfbfc5f8624b5519c4e0569c | [] | no_license | eshandas/django_project_template_mongo | 0d9ec53918abca30b7d08ec8897681c20f30dc13 | 6e5c1a255ccbf7498ddca9a10eca9cfe2c97c8a9 | refs/heads/master | 2020-05-21T19:05:46.837946 | 2017-04-05T10:50:52 | 2017-04-05T10:50:52 | 65,455,598 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | from django.conf.urls import url
from .api_views import (
PostAPI,
PostsAPI,
)
# URL routes for the posts API: a bare collection endpoint and a detail
# endpoint keyed by an alphanumeric post id captured as `post_id`.
urlpatterns = [
    url(r'^$', PostsAPI.as_view(), name='posts'),
    url(r'^(?P<post_id>[a-zA-Z0-9]+)/$', PostAPI.as_view(), name='post'),
]
| [
"eshandasnit@gmail.com"
] | eshandasnit@gmail.com |
f9a7a8d98db815e6c6b10faf55048ec728a6d87c | 3633bab8066f576c8bf9e7908afe30bb070d0b70 | /Hack-second-week/Friday/initial_program.py | 2be5bdc9787dc329446ccb6c88b231043c4542ac | [] | no_license | 6desislava6/Hack-Bulgaria | 099c195e45a443cf4a3342eff6612ac2aa66565b | de4bf7baae35e21d6a7b27d4bde68247bb85b67a | refs/heads/master | 2021-01-20T11:57:29.027595 | 2015-06-02T17:36:59 | 2015-06-02T17:36:59 | 32,828,816 | 4 | 4 | null | null | null | null | UTF-8 | Python | false | false | 160 | py | class Panda:
    def __init__(self, weight):
        """Store the panda's weight in a non-public attribute."""
        self._weight = weight
    # Translated from Bulgarian: "It can still be seen... even though it has
    # an underscore" -- the leading-underscore prefix is only a convention
    # and does not actually hide the attribute.
| [
"desislavatsvetkova@mail.bg"
] | desislavatsvetkova@mail.bg |
1ac8cfca27b83bf2beb4c6a26a8872a4b9b2a825 | 1858c78932d3e6cfeb46021176cff172457ee8bb | /polonator/fluidics/src/testing/excel_test.py | e4cd32bfd5269f7f4a6e11e16c562556157edd37 | [
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-secret-labs-2011"
] | permissive | grinner/Polonator | c5ddbc592afb022f9a2dfd4381560046fcac0371 | 512a30ca382d8cdcf8287488bdc70cb0f37a241c | refs/heads/master | 2021-05-15T01:47:00.506126 | 2019-04-19T01:41:59 | 2019-04-19T01:41:59 | 1,393,059 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,930 | py | """
---------------------------------------------------------------------------
Author: Mirko Palla.
Date: February 13, 2008.
For: G.007 polony sequencer design [fluidics software] at the Church Lab -
Genetics Department, Harvard Medical School.
Purpose: This program contains the complete code for module Biochem,
containing re-occurring biochecmistry subroutines in Python.
This software may be used, modified, and distributed freely, but this
header may not be modified and must appear at the top of this file.
------------------------------------------------------------------------------
"""
import xlrd
# Workbook holding the fluidic tubing geometry; every volume below is read
# from its first sheet.
book = xlrd.open_workbook("G007 Fluidic Volumes.xls") # read in Excel file into 'xlrd' object
sh = book.sheet_by_index(0) # create handle for first Excel sheet
#-----------------------------------------------------------------------------
# INTERNAL VOLUMES
#-----------------------------------------------------------------------------
i_volumes = {} # create dictionary for internal tubing volumes
for row in range(2, 44):
from_row = sh.cell_value(rowx=row, colx=0)
to_row = sh.cell_value(rowx=row, colx=1)
# From reagent block into manifold
block_to_manifold = sh.cell_value(rowx=row, colx=2)
tube_area_1 = sh.cell_value(rowx=row, colx=3)
tube_volume_1 = block_to_manifold * tube_area_1 # tube volume 1
# Manifold layer transition
manifold_transition = sh.cell_value(rowx=row, colx=5)
hole_area = sh.cell_value(rowx=row, colx=6)
transition_volume = manifold_transition * hole_area # transition_volume
# Manifold path length
manifold_path_length = sh.cell_value(rowx=row, colx=8)
path_area = sh.cell_value(rowx=row, colx=9)
path_volume = manifold_path_length * path_area # path volume
# From manifold to valve
manifold_to_valve = sh.cell_value(rowx=row, colx=11)
tube_area_2 = sh.cell_value(rowx=row, colx=12)
tube_volume_2 = manifold_to_valve * tube_area_2 # tube volume 2
#--------------------------- Volume dictionary creation -------------------
total_volume = tube_volume_1 + transition_volume + path_volume + tube_volume_2 # total volume sum
if not i_volumes.has_key(to_row):
i_volumes[to_row] = {}
"""
try:
i_volumes[to_row]
except KeyError:
i_volumes[to_row] = {}
"""
i_volumes[to_row][from_row] = total_volume
print "\n--> TESTING INTERNAL:", i_volumes
#-----------------------------------------------------------------------------
# EXTERNAL VOLUMES
#-----------------------------------------------------------------------------
e_volumes = {} # create dictionary for external tubing volumes
for row in range(47, 62):
from_row = sh.cell_value(rowx=row, colx=0)
to_row = sh.cell_value(rowx=row, colx=1)
# Tubing run length
tubing_run = sh.cell_value(rowx=row, colx=2)
cross_sectional_area = sh.cell_value(rowx=row, colx=4)
total_volume = tubing_run * cross_sectional_area # tubing path volume
#--------------------------- Volume dictionary creation ------------------
if not e_volumes.has_key(to_row):
e_volumes[to_row] = {}
e_volumes[to_row][from_row] = total_volume
print "\n\n--> TESTING EXTERNAL:", e_volumes
#-----------------------------------------------------------------------------
# MIXING CHAMBER VOLUMES
#-----------------------------------------------------------------------------
for row in range(63, 65):
from_row = sh.cell_value(rowx=row, colx=0)
to_row = sh.cell_value(rowx=row, colx=1)
if not e_volumes.has_key(to_row):
e_volumes[to_row] = {}
e_volumes[to_row][from_row] = sh.cell_value(rowx=row, colx=5)
print "\n\n--> TESTING MIXER:", e_volumes
print "\n--> FINISHED VOLUME TESTING\n"
| [
"a.grinner@gmail.com"
] | a.grinner@gmail.com |
9ddad79b454f8bbbcde4f02b07870ad2085c6658 | 4c83b4d7aca6bbcd15b922ad7314440fea7c9a70 | /2020-08-14_p3_idade/script_matriz1b_idade_2020-03-14.py | 84f86d5b5f3afb767b7a76567d19f46eaf04c2dc | [] | no_license | poloplanejamento/odmatrix-joinville | 63b60a85055700698cdb590c181e7c8a4d5c7361 | be7ce0814fb9dad2d289cd836dde51baa9c0850d | refs/heads/main | 2023-01-23T11:43:45.451126 | 2020-12-10T23:17:58 | 2020-12-10T23:17:58 | 320,402,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,813 | py | #!/bin/env python3
from http.client import HTTPSConnection
from base64 import b64encode
import json
import csv
import pandas as pd
# Project/API setup.
# NOTE(review): the basic-auth token below is hard-coded in the source;
# consider moving it to an environment variable before publishing.
projectID = "42"
c = HTTPSConnection("api.odmatrix.app")
userAndPass = b64encode(b"fe6b53f0280443d5bd40d5d30694f356").decode("ascii")
headers = { 'Authorization' : 'Basic %s' % userAndPass }
finall_list = []
# Query one OD matrix per (age band, date, trip type) combination and
# flatten each returned matrix into one row per non-zero O/D cell.
for fage in ["15_19","20_24","25_29","30_34","35_39","40_44","45_49","50_54","55_59","60_64","65_69","70_74","75_79","80_84","85_89","90_94","0_14"] :
    for date in ["2020-03-14"] :
        for ftriptype in ["microtrip","bus","private_transport"] :
            request = "/generatematrix?format=json&project={}&date={}&ftriptype={}&fage={}".format(projectID, date, ftriptype, fage)
            c.request('GET', request, headers=headers)
            res = c.getresponse()
            data = res.read()
            matrix = json.loads(data)
            print(request)
            for i, column in enumerate(matrix['ColumnLabels']):
                for j, row in enumerate(matrix['RowLabels']):
                    value = matrix['Data'][j][i]
                    if value == 0:
                        # skip empty cells to keep the output sparse
                        continue
                    full_row = {}
                    full_row['ProjectID'] = projectID
                    full_row['Date'] = date
                    full_row['Origin'] = row
                    full_row['Idade'] = fage
                    full_row['Destination'] = column
                    full_row['Modo'] = ftriptype
                    full_row['Trips'] = value
                    finall_list.append(full_row)
#print(finall_list)
# Pivot to one column per age band ('Idade') and write the matrix to CSV.
data = pd.DataFrame(finall_list)
final_data = pd.pivot_table(data, index=['ProjectID', 'Date', 'Origin', 'Destination', 'Modo'], columns='Idade', values='Trips')
final_data.to_csv("matriz1b_classe_2020-03-14.csv")
"caiocco@gmail.com"
] | caiocco@gmail.com |
bc2f34a32d87fbd2859ab96dae35ff0fbaec3316 | 7bfb0fff9d833e53573c90f6ec58c215b4982d14 | /1081_smallest_subsequence_of_distinct_chars.py | fe3f41a7c1e3916fdd167b07def09b2d06e37a68 | [
"MIT"
] | permissive | claytonjwong/leetcode-py | 6619aa969649597a240e84bdb548718e754daa42 | 16bbf8ac0ba5c80fe3ef67ade0d61a12991270a7 | refs/heads/master | 2023-07-14T23:40:26.569825 | 2021-08-22T17:23:20 | 2021-08-22T17:23:20 | 279,882,918 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py | #
# 1081. Smallest Subsequence of Distinct Characters
#
# Q: https://leetcode.com/problems/smallest-subsequence-of-distinct-characters/
# A: https://leetcode.com/problems/smallest-subsequence-of-distinct-characters/discuss/891644/Kt-Js-Py3-C%2B%2B-Monotonic-Queue-%2B-Detailed-Explanation
#
class Solution:
    """LeetCode 1081: lexicographically smallest subsequence of *s* that
    contains each distinct character exactly once (monotonic stack)."""

    def smallestSubsequence(self, s: str) -> str:
        last_index = {ch: i for i, ch in enumerate(s)}
        stack = []
        on_stack = set()
        for i, ch in enumerate(s):
            if ch in on_stack:
                continue
            # Pop larger characters that still occur later in the string;
            # they can be re-added in a better (smaller) position.
            while stack and ch < stack[-1] and i < last_index[stack[-1]]:
                on_stack.discard(stack.pop())
            stack.append(ch)
            on_stack.add(ch)
        return ''.join(stack)
| [
"claytonjwong@gmail.com"
] | claytonjwong@gmail.com |
c6fcfdd18cc8d5c82a78017317b30abaa333af6d | ff3727418c813ec475fd6cc53787de80497eaf1e | /bobo/direction/carte/test/nouvel_pos.py | cdb231a34c34681ccfb71d3e3a3c5631295fb9e3 | [] | no_license | pastrouveedespeudo/Final-Project | 7141133ea432543a32914f3eb28d497521e338ba | a41d04817b22bfa04d22d9fc0fcfcd0c3e8c679a | refs/heads/master | 2022-12-19T13:32:01.831673 | 2019-08-08T20:43:40 | 2019-08-08T20:43:40 | 193,540,979 | 0 | 0 | null | 2022-11-22T03:54:51 | 2019-06-24T16:27:33 | Python | UTF-8 | Python | false | false | 2,946 | py | """Here we calculate the new
direction. We have calculate
a one degree = 111.11 km
so we *0.009 the number of
km for have degrees to km and add it to lat
if direction isn't sud, nord
east or west we * by degrees
of circle trigo"""
from math import cos
from math import radians
def long_lat_1(sens, lat, long, kilometre):
    """Helper for long_lat: apply the intercardinal direction offsets.

    Each entry maps a direction name to (lat_sign, long_sign, angle_deg);
    latitude moves by lat_sign * kilometre and longitude by
    long_sign * kilometre * cos(radians(angle_deg)). Unknown directions
    leave the coordinates unchanged.
    """
    offsets = {
        'sudsudouest': (1, 1, 67.7),  # 67.7 (not 67.5) kept from the original
        'sudouest': (1, 1, 45),
        'ouestsudouest': (1, 1, 22.5),
        'ouestnordouest': (-1, -1, 157.5),
        'nordouest': (-1, -1, 135),
        'nordnordouest': (-1, -1, 112.5),
        'estsudest': (1, -1, 337),
        'sudest': (1, -1, 315),
        'sudsudest': (1, -1, 292.5),
    }
    if sens in offsets:
        lat_sign, long_sign, angle = offsets[sens]
        lat = lat + lat_sign * kilometre
        long = long + long_sign * kilometre * cos(radians(angle))
    return lat, long
def long_lat(lat, long, kiloms, sens):
    """Compute the new (lat, long) after moving *kiloms* km towards *sens*.

    One degree is taken as ~111.11 km, so kiloms * 0.009 converts the
    distance to degrees. Cardinal directions shift a single axis; the
    north/east intercardinals are handled inline and all remaining ones
    are delegated to long_lat_1. Non-numeric or oversized (> 500) inputs
    fall back to a 20 km default.
    """
    try:
        kiloms = float(kiloms)
    except (TypeError, ValueError):
        # The original had two identical except branches, each converting
        # the already-float fallback a second time; merged and simplified.
        kiloms = 20.0
    if kiloms > 500:
        kiloms = 20.0
    kilometre = kiloms * 0.009

    if sens == 'sud':
        lat = lat + kilometre
    elif sens == 'nord':
        lat = lat - kilometre
    elif sens == 'ouest':
        long = long + kilometre
    elif sens == 'est':
        long = long - kilometre
    elif sens == 'nordnordest':
        lat = lat - kilometre
        long = long - kilometre * cos(radians(337))
    elif sens == 'nordest':
        lat = lat - kilometre
        long = long - kilometre * cos(radians(315))
    elif sens == 'estnordest':
        lat = lat - kilometre
        long = long - kilometre * cos(radians(292.5))
    else:
        lat, long = long_lat_1(sens, lat, long, kilometre)
    return lat, long
| [
"noreply@github.com"
] | pastrouveedespeudo.noreply@github.com |
1f49ef3154e7a7d1b7fbe152bd62734bf5a80eb1 | ea42986fbffb82b57df399fb90f3b962a63fef45 | /quiz/migrations/0001_initial.py | 3fa36a39cdc22bc3f128b566833b4548b476d700 | [] | no_license | akhad97/Quiz_API | 2d0b28ed3e7c6a7fc1cabc5b91d5b73d7173e10c | f45a99faf0f0109799c5506066e19d22dc4c201a | refs/heads/master | 2023-03-27T10:09:49.741610 | 2021-03-29T09:26:36 | 2021-03-29T09:26:36 | 352,585,216 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,729 | py | # Generated by Django 3.1.7 on 2021-03-10 11:26
from django.conf import settings
import django.contrib.auth.models
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the quiz app (auto-generated by Django 3.1.7).

    Creates the CustomUser model plus the Answer, EmailVerification,
    Question, Quiz, QuizTaker and UsersAnswer tables, then wires up the
    remaining foreign keys with AddField operations.
    NOTE: generated migrations are normally left unedited; only this
    docstring and comments were added.
    """
    initial = True
    dependencies = [
        ('auth', '0012_alter_user_first_name_max_length'),
    ]
    # Operations are ordered so that models exist before AddField links them.
    operations = [
        migrations.CreateModel(
            name='CustomUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
                ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('username', models.CharField(max_length=50, unique=True)),
                ('score', models.IntegerField(default=0)),
                ('email', models.EmailField(max_length=254, verbose_name='email address')),
                ('first_name', models.CharField(blank=True, max_length=255, verbose_name='First Name')),
                ('last_name', models.CharField(blank=True, max_length=255, verbose_name='Last Name')),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
                'abstract': False,
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.CreateModel(
            name='Answer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('label', models.CharField(max_length=250)),
                ('is_correct', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='EmailVerification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(max_length=254)),
                ('code', models.CharField(max_length=30)),
                ('expire_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question', models.CharField(max_length=100)),
                ('order', models.IntegerField(default=0)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Quiz',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('code', models.CharField(max_length=30)),
                ('slug', models.SlugField(blank=True)),
                ('roll_out', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'Quizzes',
                'ordering': ['created_at'],
            },
        ),
        migrations.CreateModel(
            name='QuizTaker',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('score', models.IntegerField(default=0)),
                ('completed', models.BooleanField(default=False)),
                ('date_finished', models.DateTimeField(null=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('quiz', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.quiz')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='UsersAnswer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('answer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='quiz.answer')),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.question')),
                ('quiz_taker', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quiz.quiztaker')),
            ],
        ),
        migrations.AddField(
            model_name='question',
            name='quiz',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='quizzes', to='quiz.quiz'),
        ),
        migrations.AddField(
            model_name='answer',
            name='question',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='answers', to='quiz.question'),
        ),
        migrations.AddField(
            model_name='customuser',
            name='quiz',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='quiz.quiz'),
        ),
        migrations.AddField(
            model_name='customuser',
            name='user_permissions',
            field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
        ),
    ]
| [
"ahadjon.abdullaev1997@gmail.com"
] | ahadjon.abdullaev1997@gmail.com |
9c8c7eff1b9ec6d1190a1964535c46aa88f98cfa | 93549eba4b184d79af808bd1e8846995553a1f92 | /dataMiningFrame_GammaLab/src/dependence.py | 30194a19db4b38ab2fc3fff49832a0e755507cd5 | [] | no_license | morindaz/audioEmotion_relative | 4d91e36f8716d5ba77eb7f170e5eac6b5a325a75 | 9ab298a37504dc6fb48e2ae9e34156ce8f78b8cf | refs/heads/master | 2021-04-27T00:08:58.069396 | 2018-03-04T05:11:22 | 2018-03-04T05:11:22 | 123,757,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | # coding=utf-8
'''
This file records some commonly used configuration values, including:
- csvPath: location of the input data
- featurePath: location where features are stored
- model location: modelPath + estimatorName
- model parameter location: modelPath + paramName
'''
csvPath ='../data/selectedCombinedAllwithLabel.csv' # location of the input data
testPath = '../data/test.csv'
trainPath = '../data/train.csv'
featureBasic = '..//feature//'
featureName = 'cv0Clear30.csv' # name of the concrete feature csv file
featurePath = featureBasic+featureName
modelPath = '..//models//'
estimatorName = "estimator.model"
paramName = "param.pkl"
| [
"morindaz.mao@ucloud.cn"
] | morindaz.mao@ucloud.cn |
29995561366f597a2dfee137c6d6b0c14f411424 | d820c9b02da27d54520ae7a0557a939ed4c1d04a | /Level_1/직사각형별찍기.py | 348bb6c28e552584598e193f9722f12d9397cf0c | [] | no_license | Parkyunhwan/Programmers | fc42f83a92cdc2800c5ef0d2d925ebe099caa427 | 39ea61674a5e5bb569e8831bfbf4ba64597088f8 | refs/heads/master | 2020-12-02T09:08:29.530002 | 2020-10-23T13:06:21 | 2020-10-23T13:06:21 | 230,956,911 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | # 정석풀이
# print("", end='') -- the `end` parameter controls what is appended after
# each print call, letting us build a row of stars without newlines.
a, b = map(int, input().strip().split(' '))
# Unused locals `answer` and `ina` from the original were removed.
for i in range(b):
    for j in range(a):
        print("*", end='')
    print()
# Operator-based solution: build the whole a-by-b rectangle with string repetition.
a, b = map(int, input().strip().split(' '))
answer=("*"*a + "\n")*b
print(answer)
"pyh8618@gmail.com"
] | pyh8618@gmail.com |
05ee715c3472b1cb03f10effc47b4b0c9e0bb6b4 | df694423318926cf0cac2d8a293a232713d5fc69 | /ads/templatetags/contacted.py | 1a97fe0e386a3b9a314f068bf113505393ef1885 | [] | no_license | ouhouhsami/sanscom | 3e29da2dbbd3f3e9e094ed4546e879f9bf985404 | 23fe68be45649173841832361208ebdb65ffa599 | refs/heads/master | 2020-12-24T14:44:50.521981 | 2015-04-24T12:23:49 | 2015-04-24T12:23:49 | 32,098,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | from ads.models import Ad, Search, AdSearchRelation
from django import template
register = template.Library()
@register.filter(name='contacted')
def contacted(user, obj):
    """Template filter: has *user* contacted *obj* (an Ad or a Search)?

    For an Ad we inspect the ad_contacted flags on relations to the user's
    searches; for a Search, the symmetric search_contacted flags on
    relations to the user's ads. Any other object type falls through and
    returns None (falsy), matching the original behaviour.
    """
    if obj.__class__ == Ad:
        flags = AdSearchRelation.objects.filter(
            ad=obj, search__user=user
        ).values_list('ad_contacted', flat=True)
        # any() already yields the boolean we want; the original
        # if/return True/return False pair was redundant.
        return any(flags)
    if obj.__class__ == Search:
        flags = AdSearchRelation.objects.filter(
            search=obj, ad__user=user
        ).values_list('search_contacted', flat=True)
        return any(flags)
| [
"samuel.goldszmidt@gmail.com"
] | samuel.goldszmidt@gmail.com |
6dfce5b7c8699fdd856973fae4b9db264282530f | 2c003dd5a2832efa8b98e7b65531cae67662afa5 | /lib/utils/image.py | 49ff33dc350e48b46164ad9996163450e3fc1c6c | [] | no_license | tonysy/DRN-MXNet | e8c3c85070febf2b89a1ad3b34ccc63b5f55d424 | bc4b06c952219f678ef2b6cf770fa20f59640b7a | refs/heads/master | 2020-03-26T09:01:54.078029 | 2018-08-16T03:15:12 | 2018-08-16T03:15:12 | 144,732,045 | 24 | 4 | null | null | null | null | UTF-8 | Python | false | false | 15,586 | py | import numpy as np
import os
import cv2
import random
import math
from PIL import Image
from bbox.bbox_transform import clip_boxes
from functools import partial
from multiprocessing import Pool
from matplotlib import pyplot as plt
Debug = 0
def showImwithGt(im, gt):
    """Blend an RGB image with its colorized label map and display the result.

    :param im: RGB image, HxWx3 uint8 array
    :param gt: label map, HxW uint8 array; index 255 rendered light gray

    Leftover debug prints and commented-out subplot code were removed.
    """
    gt_color = Image.fromarray(gt, mode='P')
    # Fixed 19-class color palette (values match the Cityscapes convention);
    # all other indices stay black, 255 is light gray.
    pallete_raw = np.zeros((256, 3)).astype('uint8')
    pallete_raw[0, :] = [128, 64, 128]
    pallete_raw[1, :] = [244, 35, 232]
    pallete_raw[2, :] = [70, 70, 70]
    pallete_raw[3, :] = [102, 102, 156]
    pallete_raw[4, :] = [190, 153, 153]
    pallete_raw[5, :] = [153, 153, 153]
    pallete_raw[6, :] = [250, 170, 30]
    pallete_raw[7, :] = [220, 220, 0]
    pallete_raw[8, :] = [107, 142, 35]
    pallete_raw[9, :] = [152, 251, 152]
    pallete_raw[10, :] = [70, 130, 180]
    pallete_raw[11, :] = [220, 20, 60]
    pallete_raw[12, :] = [255, 0, 0]
    pallete_raw[13, :] = [0, 0, 142]
    pallete_raw[14, :] = [0, 0, 70]
    pallete_raw[15, :] = [0, 60, 100]
    pallete_raw[16, :] = [0, 80, 100]
    pallete_raw[17, :] = [0, 0, 230]
    pallete_raw[18, :] = [119, 11, 32]
    pallete_raw[255, :] = [224, 224, 224]
    pallete_raw = pallete_raw.reshape(-1)
    gt_color.putpalette(pallete_raw)
    gt_color = np.array(gt_color.convert('RGB'))
    # 50/50 alpha blend of the image and the colorized labels.
    im_blending = cv2.addWeighted(im, 0.5, gt_color, 0.5, 0)
    plt.imshow(im_blending)
    plt.show()
def showGt(gt):
    """Colorize a label map with the fixed palette and display it.

    :param gt: label map, HxW uint8 array; index 255 rendered light gray

    Leftover commented-out debug/subplot code was removed.
    """
    gt = Image.fromarray(gt, mode='P')
    # Fixed 19-class color palette (values match the Cityscapes convention);
    # all other indices stay black, 255 is light gray.
    pallete_raw = np.zeros((256, 3)).astype('uint8')
    pallete_raw[0, :] = [128, 64, 128]
    pallete_raw[1, :] = [244, 35, 232]
    pallete_raw[2, :] = [70, 70, 70]
    pallete_raw[3, :] = [102, 102, 156]
    pallete_raw[4, :] = [190, 153, 153]
    pallete_raw[5, :] = [153, 153, 153]
    pallete_raw[6, :] = [250, 170, 30]
    pallete_raw[7, :] = [220, 220, 0]
    pallete_raw[8, :] = [107, 142, 35]
    pallete_raw[9, :] = [152, 251, 152]
    pallete_raw[10, :] = [70, 130, 180]
    pallete_raw[11, :] = [220, 20, 60]
    pallete_raw[12, :] = [255, 0, 0]
    pallete_raw[13, :] = [0, 0, 142]
    pallete_raw[14, :] = [0, 0, 70]
    pallete_raw[15, :] = [0, 60, 100]
    pallete_raw[16, :] = [0, 80, 100]
    pallete_raw[17, :] = [0, 0, 230]
    pallete_raw[18, :] = [119, 11, 32]
    pallete_raw[255, :] = [224, 224, 224]
    pallete_raw = pallete_raw.reshape(-1)
    gt.putpalette(pallete_raw)
    gt_color = np.array(gt.convert('RGB'))
    plt.imshow(gt_color)
    plt.show()
def resize_softmax_output_one_channel(target_size,softmax_one_channel):
    """Bicubically resize one softmax channel to target_size.

    :param target_size: [target_height, target_width]
    :param softmax_one_channel: 2-D array of scores, shape [H, W]
    :return: resized 2-D numpy array of shape target_size
    """
    pil_image = Image.fromarray(softmax_one_channel)
    resized = pil_image.resize((target_size[1], target_size[0]), Image.CUBIC)
    return np.array(resized)
def resize_an_softmax_output(target_size, softmax_output, num_threads=4):
    """Resize every class channel of a softmax map to target_size.

    :param target_size: [imh, imw]
    :param softmax_output: array of shape [num_class, imh, imw]
    :param num_threads: worker processes; 1 resizes serially
    :return: array of shape [num_class, target_size[0], target_size[1]]
    """
    resize_fn = partial(resize_softmax_output_one_channel, target_size)
    if num_threads == 1:
        resized = [resize_fn(channel) for channel in softmax_output]
    else:
        pool = Pool(num_threads)
        resized = pool.map(resize_fn, [channel for channel in softmax_output])
        pool.close()
    expanded = [np.expand_dims(np.array(channel), 0) for channel in resized]
    return tensor_vstack(expanded)
def resize_batch_softmax_output(softmax_outputs, target_size):
    """Resize a batch of softmax maps to a common spatial size.

    :param softmax_outputs: [batch, num_class, imh, imw]
    :param target_size: [imh, imw]
    :return: [batch, num_class, target_size[0], target_size[1]]
    """
    resized = [
        np.expand_dims(
            resize_an_softmax_output(target_size=target_size, softmax_output=one), 0
        )
        for one in softmax_outputs
    ]
    return tensor_vstack(resized)
def resize_seg_pillow_target(im,target_w,target_h):
    # NOTE(review): non-functional stub -- the converted PIL image is
    # discarded, target_w/target_h are unused, and the function returns
    # None. Looks like an unfinished Pillow-based variant of
    # resize_seg_target; confirm it is unused before relying on it.
    Image.fromarray(im.astype(np.uint8,copy=False))
def resize_seg_target(im,target_w,target_h,stride):
    """Resize a label map with nearest-neighbour sampling and subsample by *stride*.

    NOTE(review): `resize` takes (im, target_size, max_size, ...), so
    target_w and target_h are actually used as the short-side target and
    the long-side cap, not as an exact output size; the assert below only
    holds when the rescaled image comes out exactly (target_h, target_w).
    Confirm against the callers.
    """
    im, _ = resize(im,target_w,target_h,interpolation=cv2.INTER_NEAREST)
    # keep every `stride`-th row/column
    im = im[::stride,::stride]
    assert im.shape[0]== target_h//stride and im.shape[1] == target_w//stride
    return im
def resize_target(im, target_w, target_h,interpolation = cv2.INTER_LINEAR):
    """Resize *im* to exactly (target_h, target_w) pixels.

    :param im: image
    :param target_w: output width
    :param target_h: output height
    :param interpolation: OpenCV interpolation flag to use
    :return: the resized image
    """
    return cv2.resize(im, (target_w, target_h), None, interpolation=interpolation)
def resize_im_target(im,target_w,target_h, use_random=False):
    """Resize an image to (target_h, target_w) with a size-aware interpolation.

    Shrinking both dims uses INTER_AREA, enlarging both uses INTER_CUBIC,
    and mixed cases use INTER_LINEAR. With use_random=True a random OpenCV
    interpolation method is picked instead (augmentation style).
    """
    src_h, src_w = im.shape[:2]
    if src_h > target_h and src_w > target_w:
        interp = cv2.INTER_AREA      # pure downscaling
    elif src_h < target_h and src_w < target_w:
        interp = cv2.INTER_CUBIC     # pure upscaling
    else:
        interp = cv2.INTER_LINEAR
    if use_random:
        interp = random.choice([
            cv2.INTER_LINEAR, cv2.INTER_CUBIC, cv2.INTER_AREA,
            cv2.INTER_NEAREST, cv2.INTER_LANCZOS4,
        ])
    return resize_target(im, target_w, target_h, interp)
def resize_seg(im, target_size, max_size, stride=0):
    """Scale a label map so its short side equals target_size (long side
    capped at max_size) using nearest-neighbour sampling; optionally pad
    each spatial dim up to a multiple of *stride* with the value 255.

    :return: (image, scale) tuple
    """
    spatial = im.shape[0:2]
    short_side = np.min(spatial)
    long_side = np.max(spatial)
    im_scale = float(target_size) / float(short_side)
    # keep the long side within max_size
    if np.round(im_scale * long_side) > max_size:
        im_scale = float(max_size) / float(long_side)
    im = np.array(cv2.resize(im, None, None, fx=im_scale, fy=im_scale, interpolation=cv2.INTER_NEAREST))
    if stride == 0:
        return im, im_scale
    padded_h = int(np.ceil(im.shape[0] / float(stride)) * stride)
    padded_w = int(np.ceil(im.shape[1] / float(stride)) * stride)
    # pad with 255 so the padding is treated as the ignore label
    padded_im = np.full((padded_h, padded_w), 255, np.uint8)
    padded_im[:im.shape[0], :im.shape[1]] = im
    return padded_im, im_scale
def resize(im, target_size, max_size, stride=0, interpolation = cv2.INTER_LINEAR):
    """Scale *im* so its short side equals target_size while capping the
    long side at max_size; optionally zero-pad each spatial dim up to a
    multiple of *stride*.

    :param im: BGR image as read by OpenCV
    :param target_size: desired short-side length
    :param max_size: maximum allowed long-side length
    :param stride: if non-zero, pad the output to a multiple of this value
    :param interpolation: OpenCV interpolation flag
    :return: (image, scale) tuple
    """
    short_side = np.min(im.shape[0:2])
    long_side = np.max(im.shape[0:2])
    im_scale = float(target_size) / float(short_side)
    # prevent the long side from exceeding max_size
    if np.round(im_scale * long_side) > max_size:
        im_scale = float(max_size) / float(long_side)
    im = np.array(cv2.resize(im, None, None, fx=im_scale, fy=im_scale, interpolation=interpolation))
    if stride == 0:
        return im, im_scale
    padded_h = int(np.ceil(im.shape[0] / float(stride)) * stride)
    padded_w = int(np.ceil(im.shape[1] / float(stride)) * stride)
    padded_im = np.zeros((padded_h, padded_w, im.shape[2])).astype(np.uint8)
    padded_im[:im.shape[0], :im.shape[1], :] = im
    return padded_im, im_scale
def resize_one_target(im, target_w, target_h, interpolation=cv2.INTER_LINEAR):
    """Resize one CHW image tensor to (target_h, target_w).

    :param im: array of shape (3, H, W)
    :return: float64 array of shape (3, target_h, target_w)

    The original copied each channel in a Python loop through a float64
    buffer; this is replaced by vectorized transposes (same float64 result).
    """
    # CHW -> HWC for OpenCV; astype makes a contiguous float64 copy, matching
    # the original's implicit upcast into a float64 zeros buffer.
    hwc = im.transpose(1, 2, 0).astype(np.float64)
    hwc = cv2.resize(hwc, (target_w, target_h), None, interpolation=interpolation)
    # back to CHW; astype again yields a contiguous float64 array
    return hwc.transpose(2, 0, 1).astype(np.float64)
def resize_batch_target(im_tensor,target_w,target_h, interpolation=cv2.INTER_LINEAR):
    """Resize every CHW image in a batch and restack them.

    :param im_tensor: array-like of shape (batch, 3, H, W)
    :return: array of shape (batch, 3, target_h, target_w)
    """
    resized = [
        np.expand_dims(resize_one_target(one, target_w, target_h, interpolation), 0)
        for one in im_tensor
    ]
    return tensor_vstack(resized)
def rotation(im,target_degree,interpolation=cv2.INTER_LINEAR,fixed_scale=True,borderValue=(255,255,255)):
    """Rotate *im* about its center by target_degree degrees on the same canvas.

    With fixed_scale=False an extra zoom factor is derived from the aspect
    ratio (multiples of 90) or the diagonal (other angles) -- presumably to
    fill the canvas after rotation; confirm against callers. Uncovered
    border pixels are filled with borderValue.

    :return: (rotated image, scale factor applied)
    """
    height, width = im.shape[:2]
    scale = 1
    if not fixed_scale:
        if target_degree % 180 == 0:
            scale = 1
        elif target_degree % 90 == 0:
            scale = float(max(height, width)) / min(height, width)
        else:
            scale = math.sqrt(pow(height, 2) + pow(width, 2)) / min(height, width)
    matrix = cv2.getRotationMatrix2D((width / 2, height / 2), target_degree, scale)
    rotated = cv2.warpAffine(im, matrix, (width, height), flags=interpolation, borderValue=borderValue)
    return rotated, scale
def flip(im):
    """Flip an image horizontally (mirror along the width axis).

    :param im: 2-D (H, W) or 3-D (H, W, C) array
    :return: the horizontally flipped view
    :raises NotImplementedError: for any other dimensionality
    """
    if len(im.shape) == 2:
        return im[:, ::-1]
    elif len(im.shape) == 3:
        return im[:, ::-1, :]
    else:
        # Bug fix: the original *returned* the NotImplementedError class
        # instead of raising an instance.
        raise NotImplementedError("flip expects a 2-D or 3-D array, got shape %s" % (im.shape,))
def transform(im, pixel_means,color_scale=0,pixel_stds = None):
    """Convert an HWC BGR image into a 1x3xHxW tensor in RGB channel order.

    Optionally multiplies by color_scale (when > 0), always subtracts
    pixel_means (given as B, G, R), and divides by pixel_stds if provided.
    The input array is never mutated.

    :param im: [height, width, channel] image in BGR
    :param pixel_means: per-channel means in B, G, R order
    :return: array of shape [batch=1, 3, height, width], channels in RGB order
    """
    if color_scale > 0:
        im = im * color_scale
    im = im - pixel_means
    if pixel_stds is not None:
        im /= pixel_stds
    tensor = np.zeros((1, 3, im.shape[0], im.shape[1]))
    # BGR -> RGB while moving channels to the front
    for channel in range(3):
        tensor[0, channel] = im[..., 2 - channel]
    return tensor
def transform_seg_gt(gt):
    """Wrap a 2-D segmentation label map into a 1x1xHxW tensor.

    :param gt: [height, width] ground-truth label map
    :return: float array of shape [1, 1, height, width]
    """
    tensor = np.zeros((1, 1, gt.shape[0], gt.shape[1]))
    tensor[0, 0] = gt
    return tensor
def transform_inverse(im_tensor, pixel_means):
    """Convert a 1x3xHxW network tensor back into an HxWx3 RGB uint8 image.

    Only single-image batches are supported; the input tensor is copied,
    never mutated.

    :param im_tensor: [batch=1, channel, height, width] tensor
    :param pixel_means: [B, G, R] per-channel means to add back
    :return: [height, width, 3] RGB uint8 image
    """
    assert im_tensor.shape[0] == 1
    tensor = im_tensor.copy()
    # channels last: [batch, height, width, channel], then drop the batch dim
    im = tensor.transpose((0, 2, 3, 1))[0]
    assert im.shape[2] == 3
    # tensor channels are RGB, so reorder the BGR means before adding back
    im += pixel_means[[2, 1, 0]]
    return im.astype(np.uint8)
def tensor_vstack(tensor_list, pad=0):
    """
    vertically stack tensors, padding trailing dims up to the max shape
    :param tensor_list: list of tensors to be stacked along the first axis
    :param pad: value to pad with
    :return: tensor whose first dim is the sum of the inputs' first dims
             and whose remaining dims are the per-dim maxima

    Improvements over the original: supports any number of dimensions
    (the original raised for ndim > 4) and places each tensor at its true
    cumulative offset instead of assuming every tensor shares the first
    tensor's leading dimension.
    """
    ndim = tensor_list[0].ndim
    dtype = tensor_list[0].dtype
    dims = [sum(tensor.shape[0] for tensor in tensor_list)]
    dims += [max(tensor.shape[d] for tensor in tensor_list) for d in range(1, ndim)]
    # np.full covers the previous zeros/ones/full special cases uniformly
    all_tensor = np.full(tuple(dims), pad, dtype=dtype)
    offset = 0
    for tensor in tensor_list:
        index = (slice(offset, offset + tensor.shape[0]),) + tuple(
            slice(0, s) for s in tensor.shape[1:]
        )
        all_tensor[index] = tensor
        offset += tensor.shape[0]
    return all_tensor
def generate_metric_label(label,skip_step=1):
    """
    Build per-pixel "same label as neighbour?" maps for a dense label image.

    The label map is compared against itself shifted by skip_step pixels for
    each of the 9 offsets (ix, iy) in {-1, 0, 1} x {-1, 0, 1} (the identity
    offset included). Both copies are embedded in a canvas padded with -1,
    so pixels whose neighbour falls outside the image never match.

    :param label: [batch, channel, height, width] integer class labels
    :param skip_step: neighbour shift distance in pixels
    :return: uint8 array [batch, 9, channel, height, width] of 0/1 matches
    """
    label_batchsize,label_channel,label_h,label_w = label.shape
    # anchor: the unshifted labels embedded in the -1 padded canvas
    anchor_image = np.full((label_batchsize, label_channel, label_h+2*skip_step,label_w+2*skip_step),-1)
    anchor_image[:,:,skip_step:skip_step+label_h,skip_step:skip_step+label_w] = label
    metric_labels = []
    for ix in xrange(-1,2,1):
        for iy in xrange(-1,2,1):
            # the same labels embedded at an offset of skip_step*(ix, iy)
            label_image = np.full((label_batchsize, label_channel, label_h + 2*skip_step, label_w + 2*skip_step), -1)
            label_image[:,:,skip_step*(1+ix):skip_step*(1+ix)+label_h,skip_step*(1+iy):skip_step*(1+iy)+label_w] = label
            # metric_label = ((label_image == anchor_image)*((label_image != 255)*(anchor_image != 255)))
            metric_label = (label_image == anchor_image)
            # crop the comparison back to this shift's embedding window
            metric_label = metric_label[:,:,skip_step*(1+ix):skip_step*(1+ix)+label_h,
                           skip_step*(1+iy):skip_step*(1+iy)+label_w]
            metric_labels.append(metric_label.astype(np.uint8))
            if Debug:
                # Python 2 debug path: visualize each per-shift match map
                print ix,iy
                print metric_label.shape
                for one_metric_label,one_label_image,one_anchor_image in zip(metric_label,label_image,anchor_image):
                    plot_border(one_metric_label.astype(np.uint8),one_label_image,one_anchor_image)
    # stack the 9 shift maps into a new axis 1
    result = np.stack(metric_labels,1)
    return result
def border_ignore_label(label, ignore_size, pad_value=255.0):
    """
    Replace a frame of ignore_size pixels around a label map with pad_value.

    The interior of the label map is kept unchanged; every pixel within
    ignore_size of any image border is overwritten (255 is the usual
    "ignore" value in segmentation label maps).

    :param label: [batch, channel, height, width] label array
    :param ignore_size: width in pixels of the border frame to mask out
    :param pad_value: value written into the masked frame
    :return: float array of the same shape with the border overwritten
    """
    _, _, height, width = label.shape
    result = np.full(label.shape, pad_value)
    interior = (slice(None), slice(None),
                slice(ignore_size, height - ignore_size),
                slice(ignore_size, width - ignore_size))
    result[interior] = label[interior]
    return result
def plot_border(metric_label, one_label_image, one_anchor_image):
    """
    Debug helper: show a match map beside its two colorized label images.

    Left: the raw 0/1 metric (match) map. Middle / right: the shifted and
    anchor label maps rendered through a Cityscapes-style color palette
    (class id 255 maps to a light-grey "ignore" color).
    """
    # class id -> RGB color; any id not listed below stays black
    palette_colors = {
        0: (128, 64, 128), 1: (244, 35, 232), 2: (70, 70, 70),
        3: (102, 102, 156), 4: (190, 153, 153), 5: (153, 153, 153),
        6: (250, 170, 30), 7: (220, 220, 0), 8: (107, 142, 35),
        9: (152, 251, 152), 10: (70, 130, 180), 11: (220, 20, 60),
        12: (255, 0, 0), 13: (0, 0, 142), 14: (0, 0, 70),
        15: (0, 60, 100), 16: (0, 80, 100), 17: (0, 0, 230),
        18: (119, 11, 32), 255: (224, 224, 224),
    }
    pallete_raw = np.zeros((256, 3)).astype('uint8')
    for class_id, color in palette_colors.items():
        pallete_raw[class_id, :] = color
    # three side-by-side panels: match map, shifted labels, anchor labels
    panels = (
        (131, np.squeeze(metric_label)),
        (132, pallete_raw[np.squeeze(one_label_image)]),
        (133, pallete_raw[np.squeeze(one_anchor_image)]),
    )
    for subplot_id, image in panels:
        plt.subplot(subplot_id)
        plt.imshow(image)
plt.show() | [
"sy.zhangbuaa@gmail.com"
] | sy.zhangbuaa@gmail.com |
526ade87755fc44b5fb54f1538fc465866bf5842 | 65f7e25aeff0c400e9d82a6b70d746f02ff58c61 | /openstackclient/tests/identity/v2_0/test_identity.py | 8a50a48a063ddd1b90ae171f87a4b2d611e7dada | [
"Apache-2.0"
] | permissive | pombredanne/python-openstackclient | 877255c5d0962a5a202133675ca3199c4d36ec62 | 6fe687fdf662a7495b20a1d94f27bf557525af58 | refs/heads/master | 2020-12-29T02:47:08.116982 | 2013-09-12T19:49:41 | 2013-09-12T21:23:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | # Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from openstackclient.tests.identity.v2_0 import fakes
from openstackclient.tests import utils
# Canned credentials shared by the identity v2 test cases.
AUTH_TOKEN = "foobar"
AUTH_URL = "http://0.0.0.0"


class TestIdentityv2(utils.TestCommand):
    """Base test case that installs a fake identity v2 client on the app."""

    def setUp(self):
        super(TestIdentityv2, self).setUp()
        # Every identity call made by derived tests goes through this fake.
        fake_client = fakes.FakeIdentityv2Client(endpoint=AUTH_URL,
                                                 token=AUTH_TOKEN)
        self.app.client_manager.identity = fake_client
| [
"dtroyer@gmail.com"
] | dtroyer@gmail.com |
db48951ba1c8b769d951e5206cf549f040cfc177 | 9f89fa328fa17eb86c19c7c44f2ec36a71e85a85 | /tests/test_video_upload.py | 007e69cef5c47b6befe3064f8731eb00167f52ce | [
"BSD-3-Clause"
] | permissive | bjoernh/django-cast | 5d7ca6e1a8ae0a364155716232fea663be44b977 | 89527f9c38ff04745c93f395c8111883210f0cf3 | refs/heads/master | 2020-05-01T06:05:52.679191 | 2019-03-23T08:15:19 | 2019-03-23T08:15:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,216 | py | import pytest
from django.urls import reverse
from cast.models import Video
class TestVideoUpload:
    """Tests for the cast video upload API endpoint."""
    @pytest.mark.django_db
    def test_upload_video_not_authenticated(self, client, image_1px_io):
        """Anonymous uploads are rejected with a redirect to the login page."""
        upload_url = reverse("cast:api:upload_video")
        image_1px_io.seek(0)
        r = client.post(upload_url, {"original": image_1px_io})
        # redirect to login
        assert r.status_code == 302
    @pytest.mark.django_db
    def test_upload_video_authenticated(self, client, user, image_1px_io):
        """A logged-in user can upload; poster creation must be triggered."""
        # login
        r = client.login(username=user.username, password=user._password)
        self.called_create_poster = False
        def set_called_create_poster():
            self.called_create_poster = True
        # Stub out Video._create_poster on the class so the expensive poster
        # generation is skipped while still recording that it was invoked.
        # NOTE(review): the stub is restored below but not in a try/finally,
        # so a failing upload leaves it installed for later tests.
        Video._saved_create_poster = Video._create_poster
        Video._create_poster = lambda x: set_called_create_poster()
        # upload
        upload_url = reverse("cast:api:upload_video")
        image_1px_io.seek(0)
        r = client.post(upload_url, {"original": image_1px_io})
        Video._create_poster = Video._saved_create_poster
        # 201 Created; the response body is the new Video's primary key.
        assert r.status_code == 201
        assert self.called_create_poster
        assert int(r.content.decode("utf-8")) > 0
| [
"jochen@wersdoerfer.de"
] | jochen@wersdoerfer.de |
569b08ee643d16d49728529d3dd734b3149a976a | 42cc1007c4b206495a5ae550c58ce44f804cc09e | /backend/chat/migrations/0001_initial.py | 57b14e1aad3cb06c683a26a0a3627f6e188e7482 | [] | no_license | crowdbotics-apps/impro-go-18308 | 2370f0914074ba5c230d10ca5e74076eec02b424 | e04c1a29ad7225a0f1bdeb210e97e2e83a0d0888 | refs/heads/master | 2022-11-03T08:09:06.693271 | 2020-06-22T00:07:57 | 2020-06-22T00:07:57 | 274,003,245 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,805 | py | # Generated by Django 2.2.13 on 2020-06-22 00:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 2.2.13) initial schema for the chat app:
    # threads, their members, messages, per-message/per-thread action logs
    # and message forwarding. All models reference chat_user_profile.Profile.
    initial = True
    dependencies = [
        ('chat_user_profile', '0001_initial'),
    ]
    operations = [
        # Message: sender/thread FKs are attached via AddField further down
        # because ThreadMember and Thread are created after this model.
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('message', models.TextField()),
                ('attachment', models.URLField()),
                ('is_draft', models.BooleanField()),
                ('is_delivered', models.BooleanField()),
                ('is_read', models.BooleanField()),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('timestamp_delivered', models.DateTimeField()),
                ('timestamp_read', models.DateTimeField()),
            ],
        ),
        migrations.CreateModel(
            name='Thread',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('thread_photo', models.URLField()),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # Membership of a profile in a thread, with join/leave timestamps.
        migrations.CreateModel(
            name='ThreadMember',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_admin', models.BooleanField()),
                ('timestamp_joined', models.DateTimeField(auto_now_add=True)),
                ('timestamp_left', models.DateTimeField()),
                ('last_rejoined', models.DateTimeField()),
                ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='threadmember_profile', to='chat_user_profile.Profile')),
                ('thread', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='threadmember_thread', to='chat.Thread')),
            ],
        ),
        migrations.CreateModel(
            name='ThreadAction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('action', models.CharField(max_length=7)),
                ('timestamp_action', models.DateTimeField(auto_now_add=True)),
                ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='threadaction_profile', to='chat_user_profile.Profile')),
                ('thread', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='threadaction_thread', to='chat.Thread')),
            ],
        ),
        migrations.CreateModel(
            name='MessageAction',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('action', models.CharField(max_length=7)),
                ('timestamp_action', models.DateTimeField(auto_now_add=True)),
                ('message', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='messageaction_message', to='chat.Message')),
                ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='messageaction_profile', to='chat_user_profile.Profile')),
            ],
        ),
        # Deferred Message FKs (see comment on the Message model above).
        migrations.AddField(
            model_name='message',
            name='sent_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='message_sent_by', to='chat.ThreadMember'),
        ),
        migrations.AddField(
            model_name='message',
            name='thread',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='message_thread', to='chat.Thread'),
        ),
        migrations.CreateModel(
            name='ForwardedMessage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('timestamp_forwarded', models.DateTimeField(auto_now_add=True)),
                ('forwarded_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='forwardedmessage_forwarded_by', to='chat_user_profile.Profile')),
                ('forwarded_to', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='forwardedmessage_forwarded_to', to='chat.Thread')),
                ('message', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='forwardedmessage_message', to='chat.Message')),
            ],
        ),
    ]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
228c06865e9face05da10ba72bf837de0c38b96b | cd1132c39b02e8997a4da832f9c2b760caba1801 | /napari/layers/image/experimental/octree_level.py | fa1f9a433482d77c522caf33c00ce62a90338d12 | [
"BSD-3-Clause"
] | permissive | HarshCasper/napari | 8c9f7051afc36d492f9e30760fe07758bb91e338 | 3ed7d2db678f4012753f53b2d40cff9d34a8011f | refs/heads/master | 2023-03-19T01:27:58.473927 | 2021-03-15T05:41:29 | 2021-03-15T05:41:29 | 347,844,575 | 0 | 0 | BSD-3-Clause | 2021-03-15T05:39:00 | 2021-03-15T05:11:44 | Python | UTF-8 | Python | false | false | 6,852 | py | """OctreeLevelInfo and OctreeLevel classes.
"""
import logging
import math
from typing import Dict, List, Optional
import numpy as np
from ....types import ArrayLike
from .octree_chunk import OctreeChunk, OctreeChunkGeom, OctreeLocation
from .octree_util import OctreeMetadata
LOGGER = logging.getLogger("napari.octree")
class OctreeLevelInfo:
    """Geometry information for a single octree level.

    This should be a NamedTuple.

    Parameters
    ----------
    meta : OctreeMetadata
        Information about the entire octree.
    level_index : int
        The index of this level within the whole tree.
    """

    def __init__(self, meta: OctreeMetadata, level_index: int):
        self.meta = meta
        self.level_index = level_index
        # Each level up halves the resolution: level N is downscaled by 2**N.
        self.scale = 2 ** level_index
        shape = meta.base_shape
        self.image_shape = (
            int(shape[0] / self.scale),
            int(shape[1] / self.scale),
        )
        # One tile spans tile_size * scale pixels of the base image; edge
        # tiles that are only partially covered still count, hence ceil().
        pixels_per_tile = meta.tile_size * self.scale
        self.rows = math.ceil(shape[0] / pixels_per_tile)
        self.cols = math.ceil(shape[1] / pixels_per_tile)
        self.shape_in_tiles = [self.rows, self.cols]
        self.num_tiles = self.rows * self.cols
class OctreeLevel:
    """One level of the octree.

    An OctreeLevel is "sparse" in that it only contains a dict of
    OctreeChunks for the portion of the octree that is currently being
    rendered. So even if the full level contains hundreds of millions of
    chunks, this class only contains a few dozens OctreeChunks.

    This was necessary because even having a null reference for every
    OctreeChunk in a level would use too much space and be too slow to
    construct.

    Parameters
    ----------
    slice_id : int
        The id of the OctreeSlice we are in.
    data : ArrayLike
        The data for this level.
    meta : OctreeMetadata
        The base image shape and other details.
    level_index : int
        Index of this specific level (0 is full resolution).

    Attributes
    ----------
    info : OctreeLevelInfo
        Metadata about this level.
    _tiles : Dict[tuple, OctreeChunk]
        Maps (row, col) tuple to the OctreeChunk at that location.
    """

    def __init__(
        self,
        slice_id: int,
        data: ArrayLike,
        meta: OctreeMetadata,
        level_index: int,
    ):
        self.slice_id = slice_id
        self.data = data
        self.info = OctreeLevelInfo(meta, level_index)
        # Sparse storage: only chunks that were actually requested exist.
        self._tiles: Dict[tuple, OctreeChunk] = {}

    def get_chunk(
        self, row: int, col: int, create=False
    ) -> Optional[OctreeChunk]:
        """Return the OctreeChunk at this location if it exists.

        If create is True, an OctreeChunk will be created if one
        does not exist at this location.

        Parameters
        ----------
        row : int
            The row in the level.
        col : int
            The column in the level.
        create : bool
            If True, create the OctreeChunk if it does not exist.

        Returns
        -------
        Optional[OctreeChunk]
            The OctreeChunk if one existed or we just created it.
        """
        try:
            return self._tiles[(row, col)]
        except KeyError:
            if not create:
                return None  # It didn't exist so we're done.

        rows, cols = self.info.shape_in_tiles
        if row < 0 or row >= rows or col < 0 or col >= cols:
            # The coordinates are not in the level. Not an exception because
            # callers might be trying to get children just over the edge
            # for non-power-of-two base images.
            return None

        # Create a chunk at this location and return it.
        octree_chunk = self._create_chunk(row, col)
        self._tiles[(row, col)] = octree_chunk
        return octree_chunk

    def _create_chunk(self, row: int, col: int) -> OctreeChunk:
        """Create a new OctreeChunk for this location in the level.

        Parameters
        ----------
        row : int
            The row in the level.
        col : int
            The column in the level.

        Returns
        -------
        OctreeChunk
            The newly created chunk.
        """
        level_index = self.info.level_index

        meta = self.info.meta
        layer_ref = meta.layer_ref

        location = OctreeLocation(
            layer_ref, self.slice_id, level_index, row, col
        )

        scale = self.info.scale

        tile_size = self.info.meta.tile_size
        scaled_size = tile_size * scale

        pos = np.array(
            [col * scaled_size, row * scaled_size], dtype=np.float32
        )

        data = self._get_data(row, col)

        # Create OctreeChunkGeom used by the visual for rendering this
        # chunk. Size it based on the base image pixels, not based on the
        # data in this level, so it's exact.
        # FIX: np.float was a deprecated alias of the builtin float and was
        # removed in NumPy 1.24; np.float64 is the identical dtype.
        base = np.array(meta.base_shape[::-1], dtype=np.float64)
        remain = base - pos
        size = np.minimum(remain, [scaled_size, scaled_size])
        geom = OctreeChunkGeom(pos, size)

        # Return the newly created chunk.
        return OctreeChunk(data, location, geom)

    def _get_data(self, row: int, col: int) -> ArrayLike:
        """Get the chunk's data at this location.

        Parameters
        ----------
        row : int
            The row coordinate.
        col : int
            The column coordinate.

        Returns
        -------
        ArrayLike
            The data at this location.
        """
        tile_size = self.info.meta.tile_size

        array_slice = (
            slice(row * tile_size, (row + 1) * tile_size),
            slice(col * tile_size, (col + 1) * tile_size),
        )

        if self.data.ndim == 3:
            array_slice += (slice(None),)  # Add the colors.

        return self.data[array_slice]
def log_levels(levels: List[OctreeLevel], start_level: int = 0) -> None:
    """Log the dimensions of each level nicely.

    We take start_level so we can log the "extra" levels we created but
    with their correct level numbers.

    Parameters
    ----------
    levels : List[OctreeLevel]
        Print information about these levels.
    start_level : int
        Start the indexing at this number, shift the indexes up.
    """
    from ...._vendor.experimental.humanize.src.humanize import intword

    def _dim_str(dim: tuple) -> None:
        # "(rows, cols) = <human readable product>"
        return f"({dim[0]}, {dim[1]}) = {intword(dim[0] * dim[1])}"

    for level_index, level in enumerate(levels, start=start_level):
        LOGGER.info(
            "Level %d: %s pixels -> %s tiles",
            level_index,
            _dim_str(level.info.image_shape),
            _dim_str(level.info.shape_in_tiles),
        )
| [
"noreply@github.com"
] | HarshCasper.noreply@github.com |
f553473cad4b347a8d994f981d9e3b036e3d894d | a9f4434d3b410886ffc10aa5aede3634692152b6 | /0219/ex9.py | 7338c30194e1471d7f625e24a070bbb39ba10eb9 | [] | no_license | parka01/python_ex | d3690dcd8753864c335bf7782553719a072bd01d | a5811487516eb9ef86d5ae93e9060cac267b87ce | refs/heads/main | 2023-03-13T08:35:03.837790 | 2021-02-26T03:40:41 | 2021-02-26T03:40:41 | 339,892,972 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | num1=input('1 번째 성적 입력: ')
# -- corrected rewrite -------------------------------------------------------
# The original attempt did not even compile: from the second input on the
# prompt strings were unquoted (SyntaxError), the printed labels were
# unquoted as well, the inputs were never converted to int before being
# compared, and the validity test was inverted (a score is invalid when it
# is BELOW 0 or ABOVE 100 -- "num1>0 or num1<100" is always true).
# num1 is read by the statement just above this block.
scores = [int(num1)]
for order in range(2, 6):
    scores.append(int(input(str(order) + ' 번째 성적 입력: ')))
for order, score in enumerate(scores, start=1):
    if score < 0 or score > 100:
        print('유효한 성적이 아닙니다.')
    else:
        print(order, '번째 성적:', score)
print('총점: ', sum(scores))
print('평균: ', sum(scores) / 5)
#---------- model answer ----------
# Keep re-asking for the same index until a valid (0..100) score is entered;
# five accepted scores are summed, then the total (and, on the following
# line, the average) is printed.
total = 0
sub = 1
while sub <= 5:
    score = int(input(str(sub) + '번째 점수 입력: '))
    if 0 <= score <= 100:
        total += score
        print(sub, "번째 성적:", score)
        sub += 1
    else:
        print('유효한 성적이 아닙니다.')
print('총점: ', total)
print('평균: ', total/5) | [
"68191916+parka01@users.noreply.github.com"
] | 68191916+parka01@users.noreply.github.com |
0a7355f1e458c39857cd28e18a0a5d8508a9bf0c | 3ff1c245d945acf82e48f388d2457204e202275f | /desafio/migrations/0010_auto_20201020_0921.py | 5d4b58297cbbe8d0e9195a30c4618331810f70fb | [] | no_license | rauldosS/desafio_compiladores | 075e7dcb3a167d20d71928727db6c1cb500e23af | da01adf41c47dafd50b1487bb4ad8d27c4f2d199 | refs/heads/main | 2023-01-03T09:13:18.990618 | 2020-10-29T01:25:59 | 2020-10-29T01:25:59 | 305,174,524 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | # Generated by Django 3.1.2 on 2020-10-20 12:21
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 3.1.2): adds ordering/verbose-name Meta options
    # to the Caracteres and Linhas models and a new required "versao" field.
    dependencies = [
        ('desafio', '0009_auto_20201020_0858'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='caracteres',
            options={'ordering': ('id', 'caractere'), 'verbose_name': 'Caractere', 'verbose_name_plural': 'Caracteres'},
        ),
        migrations.AlterModelOptions(
            name='linhas',
            options={'ordering': ('id', 'linha'), 'verbose_name': 'Linha', 'verbose_name_plural': 'Linhas'},
        ),
        migrations.AddField(
            model_name='modelformarquivo',
            name='versao',
            field=models.IntegerField(default=1, verbose_name='Versao'),
            # default=1 was only needed to back-fill existing rows once
            preserve_default=False,
        ),
    ]
| [
"48498755+rauldosS@users.noreply.github.com"
] | 48498755+rauldosS@users.noreply.github.com |
9a618c0246162b15038697d9356316cb78955ec6 | fc67dc5e438fe3c7db414a1e54df45d4f4cad7b3 | /devel/lib/python2.7/dist-packages/myprocess/srv/_myprocess_position.py | 5da271bc2f8c7dc39f6fef9116397cfbc21ffd8f | [] | no_license | LizhiyuanBest/myros | 2843b6b3701940bf10d8a0391b5a45670c1c48a0 | 31e7c82ef74ba84c6078253d802b02ffe303cea8 | refs/heads/master | 2022-11-12T09:05:00.439267 | 2020-06-28T02:30:41 | 2020-06-28T02:30:41 | 275,491,444 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,032 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from myprocess/myprocess_positionRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
class myprocess_positionRequest(genpy.Message):
  # genpy-generated request half of the myprocess_position service.
  # Carries a single field, "name" (std_msgs/String). Do not hand-edit
  # the serialization code: it must stay in sync with the .srv definition.
  _md5sum = "0fce35bd9f5b27a63eb9b0e831759a0b"
  _type = "myprocess/myprocess_positionRequest"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """std_msgs/String name
================================================================================
MSG: std_msgs/String
string data
"""
  __slots__ = ['name']
  _slot_types = ['std_msgs/String']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       name
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(myprocess_positionRequest, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.name is None:
        self.name = std_msgs.msg.String()
    else:
      self.name = std_msgs.msg.String()
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self.name.data
      length = len(_x)
      # strings are wire-encoded as a little-endian uint32 length + utf-8 bytes
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      if self.name is None:
        self.name = std_msgs.msg.String()
      end = 0
      # read the uint32 length prefix, then the string payload
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.name.data = str[start:end].decode('utf-8')
      else:
        self.name.data = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self.name.data
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      if self.name is None:
        self.name = std_msgs.msg.String()
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.name.data = str[start:end].decode('utf-8')
      else:
        self.name.data = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Cached Struct for a single little-endian uint32 (used for length prefixes);
# the accessor shape is kept for genpy template compatibility.
_struct_I = genpy.struct_I
def _get_struct_I():
    global _struct_I
    return _struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from myprocess/myprocess_positionResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
class myprocess_positionResponse(genpy.Message):
  # genpy-generated response half of the myprocess_position service.
  # Carries a single field, "result" (std_msgs/Float64MultiArray). Do not
  # hand-edit the serialization code: it mirrors the .srv definition.
  _md5sum = "a8a649bfeb277adfa469bbcaeb9c828b"
  _type = "myprocess/myprocess_positionResponse"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """std_msgs/Float64MultiArray result
================================================================================
MSG: std_msgs/Float64MultiArray
# Please look at the MultiArrayLayout message definition for
# documentation on all multiarrays.
MultiArrayLayout layout # specification of data layout
float64[] data # array of data
================================================================================
MSG: std_msgs/MultiArrayLayout
# The multiarray declares a generic multi-dimensional array of a
# particular data type. Dimensions are ordered from outer most
# to inner most.
MultiArrayDimension[] dim # Array of dimension properties
uint32 data_offset # padding elements at front of data
# Accessors should ALWAYS be written in terms of dimension stride
# and specified outer-most dimension first.
#
# multiarray(i,j,k) = data[data_offset + dim_stride[1]*i + dim_stride[2]*j + k]
#
# A standard, 3-channel 640x480 image with interleaved color channels
# would be specified as:
#
# dim[0].label = "height"
# dim[0].size = 480
# dim[0].stride = 3*640*480 = 921600 (note dim[0] stride is just size of image)
# dim[1].label = "width"
# dim[1].size = 640
# dim[1].stride = 3*640 = 1920
# dim[2].label = "channel"
# dim[2].size = 3
# dim[2].stride = 3
#
# multiarray(i,j,k) refers to the ith row, jth column, and kth channel.
================================================================================
MSG: std_msgs/MultiArrayDimension
string label # label of given dimension
uint32 size # size of given dimension (in type units)
uint32 stride # stride of given dimension"""
  __slots__ = ['result']
  _slot_types = ['std_msgs/Float64MultiArray']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.
    The available fields are:
       result
    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(myprocess_positionResponse, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.result is None:
        self.result = std_msgs.msg.Float64MultiArray()
    else:
      self.result = std_msgs.msg.Float64MultiArray()
  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      # layout.dim array: uint32 count, then per-entry (string label, 2x uint32)
      length = len(self.result.layout.dim)
      buff.write(_struct_I.pack(length))
      for val1 in self.result.layout.dim:
        _x = val1.label
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1
        buff.write(_get_struct_2I().pack(_x.size, _x.stride))
      buff.write(_get_struct_I().pack(self.result.layout.data_offset))
      # data array: uint32 count, then little-endian float64s
      length = len(self.result.data)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.result.data))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      if self.result is None:
        self.result = std_msgs.msg.Float64MultiArray()
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.result.layout.dim = []
      for i in range(0, length):
        val1 = std_msgs.msg.MultiArrayDimension()
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.label = str[start:end].decode('utf-8')
        else:
          val1.label = str[start:end]
        _x = val1
        start = end
        end += 8
        (_x.size, _x.stride,) = _get_struct_2I().unpack(str[start:end])
        self.result.layout.dim.append(val1)
      start = end
      end += 4
      (self.result.layout.data_offset,) = _get_struct_I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      pattern = '<%sd'%length
      start = end
      end += struct.calcsize(pattern)
      self.result.data = struct.unpack(pattern, str[start:end])
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      length = len(self.result.layout.dim)
      buff.write(_struct_I.pack(length))
      for val1 in self.result.layout.dim:
        _x = val1.label
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = val1
        buff.write(_get_struct_2I().pack(_x.size, _x.stride))
      buff.write(_get_struct_I().pack(self.result.layout.data_offset))
      length = len(self.result.data)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      # numpy fast path: dump the raw array buffer instead of struct.pack
      buff.write(self.result.data.tostring())
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      if self.result is None:
        self.result = std_msgs.msg.Float64MultiArray()
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.result.layout.dim = []
      for i in range(0, length):
        val1 = std_msgs.msg.MultiArrayDimension()
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.label = str[start:end].decode('utf-8')
        else:
          val1.label = str[start:end]
        _x = val1
        start = end
        end += 8
        (_x.size, _x.stride,) = _get_struct_2I().unpack(str[start:end])
        self.result.layout.dim.append(val1)
      start = end
      end += 4
      (self.result.layout.data_offset,) = _get_struct_I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      pattern = '<%sd'%length
      start = end
      end += struct.calcsize(pattern)
      # numpy fast path: view the payload as a float64 array without copying
      self.result.data = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Cached Struct for a single little-endian uint32 (length prefixes).
_struct_I = genpy.struct_I
def _get_struct_I():
    global _struct_I
    return _struct_I
# Lazily-created Struct for two consecutive little-endian uint32s
# (MultiArrayDimension's size and stride fields).
_struct_2I = None
def _get_struct_2I():
    global _struct_2I
    if _struct_2I is None:
        _struct_2I = struct.Struct("<2I")
    return _struct_2I
class myprocess_position(object):
  # Service descriptor tying the request and response message classes
  # together for rospy's Service/ServiceProxy machinery.
  _type          = 'myprocess/myprocess_position'
  _md5sum = '2020962a1bb2cd737e928d23469d8479'
  _request_class  = myprocess_positionRequest
  _response_class = myprocess_positionResponse
| [
"LizhiyuanBest@163.com"
] | LizhiyuanBest@163.com |
3b37f885070ec7ead1835b331de132076184f910 | b8ab0e1ac2634741a05e5fef583585b597a6cdcf | /wsltools/utils/faker/providers/person/ro_RO/__init__.py | 3195455b26659df13684aece98f9e1d6384fddda | [
"MIT"
] | permissive | Symbo1/wsltools | be99716eac93bfc270a5ef0e47769290827fc0c4 | 0b6e536fc85c707a1c81f0296c4e91ca835396a1 | refs/heads/master | 2022-11-06T16:07:50.645753 | 2020-06-30T13:08:00 | 2020-06-30T13:08:00 | 256,140,035 | 425 | 34 | MIT | 2020-04-16T14:10:45 | 2020-04-16T07:22:21 | Python | UTF-8 | Python | false | false | 9,215 | py | # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
from .. import Provider as PersonProvider
class Provider(PersonProvider):
    """Romanian (ro_RO) person-name provider for Faker.

    Note: repeated entries in the format tuples are intentional -- Faker picks
    formats uniformly, so repeating the single-given-name pattern weights it
    3:1 against the double-given-name pattern.
    """

    formats_female = (
        '{{first_name_female}} {{last_name}}',
        '{{first_name_female}} {{last_name}}',
        '{{first_name_female}} {{last_name}}',
        '{{first_name_female}} {{first_name_female}} {{last_name}}',
    )
    formats_male = (
        '{{first_name_male}} {{last_name}}',
        '{{first_name_male}} {{last_name}}',
        '{{first_name_male}} {{last_name}}',
        '{{first_name_male}} {{first_name_male}} {{last_name}}',
    )
    # sources: https://ro.wikipedia.org/wiki/List%C4%83_de_prenume_rom%C3%A2ne%C8%99ti
    first_names_female = (
        'Ada', 'Adela', 'Adelaida', 'Adelina', 'Adina', 'Adriana', 'Agata', 'Aglaia', 'Agripina', 'Aida', 'Alberta',
        'Albertina', 'Alexandra', 'Alexandrina', 'Alice', 'Alida', 'Alina', 'Alis', 'Alma', 'Amalia', 'Amanda',
        'Amelia',
        'Ana', 'Anabela', 'Anaida', 'Anamaria', 'Anastasia', 'Anca', 'Ancuța', 'Anda', 'Andra', 'Andrada', 'Andreea',
        'Anemona', 'Aneta', 'Angela', 'Anghelina', 'Anica', 'Anișoara', 'Antoaneta', 'Antonela', 'Antonia', 'Anuța',
        'Ariadna', 'Ariana', 'Arina', 'Aristița', 'Artemisa', 'Astrid', 'Atena', 'Augustina', 'Aura', 'Aurelia',
        'Aureliana', 'Aurica', 'Aurora', 'Axenia', 'Beatrice', 'Betina', 'Bianca', 'Blanduzia', 'Bogdana', 'Brândușa',
        'Camelia', 'Carina', 'Carla', 'Carmen', 'Carmina', 'Carolina', 'Casandra', 'Casiana', 'Caterina', 'Catinca',
        'Catrina', 'Catrinel', 'Cătălina', 'Cecilia', 'Celia', 'Cerasela', 'Cezara', 'Cipriana', 'Clara', 'Clarisa',
        'Claudia', 'Clementina', 'Cleopatra', 'Codrina', 'Codruța', 'Constanța', 'Constantina', 'Consuela', 'Coralia',
        'Corina', 'Cornelia', 'Cosmina', 'Crenguța', 'Crina', 'Cristina', 'Daciana', 'Dafina', 'Daiana', 'Dalia',
        'Dana',
        'Daniela', 'Daria', 'Dariana', 'Delia', 'Demetra', 'Denisa', 'Despina', 'Diana', 'Dida', 'Didina', 'Dimitrina',
        'Dina', 'Dochia', 'Doina', 'Domnica', 'Dora', 'Doriana', 'Dorina', 'Dorli', 'Draga', 'Dumitra', 'Dumitrana',
        'Ecaterina', 'Eftimia', 'Elena', 'Eleonora', 'Eliana', 'Elisabeta', 'Elisaveta', 'Eliza', 'Elodia', 'Elvira',
        'Emanuela', 'Emilia', 'Erica', 'Estera', 'Eufrosina', 'Eugenia', 'Eusebia', 'Eva', 'Evanghelina', 'Evelina',
        'Fabia', 'Fabiana', 'Felicia', 'Filofteia', 'Fiona', 'Flavia', 'Floare', 'Floarea', 'Flora', 'Florența',
        'Florentina', 'Floriana', 'Florica', 'Florina', 'Francesca', 'Frusina', 'Gabriela', 'Geanina', 'Gențiana',
        'Georgeta', 'Georgia', 'Georgiana', 'Geta', 'Gherghina', 'Gianina', 'Gina', 'Giorgiana', 'Grațiana', 'Grațiela',
        'Henrieta', 'Heracleea', 'Hortensia', 'Iasmina', 'Ica', 'Ileana', 'Ilinca', 'Ilona', 'Ina', 'Ioana', 'Ioanina',
        'Iolanda', 'Ionela', 'Ionelia', 'Iosefina', 'Iridenta', 'Irina', 'Iris', 'Isabela', 'Iulia', 'Iuliana',
        'Iustina',
        'Ivona', 'Izabela', 'Jana', 'Janeta', 'Janina', 'Jasmina', 'Jeana', 'Julia', 'Julieta', 'Larisa', 'Laura',
        'Laurenția', 'Lavinia', 'Lăcrămioara', 'Leana', 'Lelia', 'Leontina', 'Leopoldina', 'Letiția', 'Lia', 'Liana',
        'Lidia', 'Ligia', 'Lili', 'Liliana', 'Lioara', 'Livia', 'Loredana', 'Lorelei', 'Lorena', 'Luana', 'Lucia',
        'Luciana', 'Lucreția', 'Ludmila', 'Ludovica', 'Luiza', 'Luminița', 'Magdalena', 'Maia', 'Malvina', 'Manuela',
        'Mara', 'Marcela', 'Marcheta', 'Marga', 'Margareta', 'Maria', 'Mariana', 'Maricica', 'Marilena', 'Marina',
        'Marinela', 'Marioara', 'Marta', 'Matilda', 'Mădălina', 'Mălina', 'Mărioara', 'Măriuca', 'Melania', 'Melina',
        'Mihaela', 'Milena', 'Mina', 'Minodora', 'Mioara', 'Mirabela', 'Mirela', 'Mirona', 'Miruna', 'Mona', 'Monalisa',
        'Monica', 'Nadia', 'Narcisa', 'Natalia', 'Natașa', 'Nicoleta', 'Niculina', 'Nidia', 'Noemi', 'Nora', 'Norica',
        'Oana', 'Octavia', 'Octaviana', 'Ofelia', 'Olga', 'Olimpia', 'Olivia', 'Ortansa', 'Otilia', 'Ozana', 'Pamela',
        'Paraschiva', 'Patricia', 'Paula', 'Paulica', 'Paulina', 'Petronela', 'Petruța', 'Pompilia', 'Profira', 'Rada',
        'Rafila', 'Raluca', 'Ramona', 'Rebeca', 'Renata', 'Rica', 'Roberta', 'Robertina', 'Rodica', 'Romanița',
        'Romina',
        'Roxana', 'Roxelana', 'Roza', 'Rozalia', 'Ruxanda', 'Ruxandra', 'Sabina', 'Sabrina', 'Safta', 'Salomea',
        'Sanda',
        'Saveta', 'Savina', 'Sânziana', 'Semenica', 'Severina', 'Sidonia', 'Silvana', 'Silvia', 'Silviana', 'Simina',
        'Simona', 'Smaranda', 'Sofia', 'Sonia', 'Sorana', 'Sorina', 'Speranța', 'Stana', 'Stanca', 'Stela', 'Steliana',
        'Steluța', 'Suzana', 'Svetlana', 'Ștefana', 'Ștefania', 'Tamara', 'Tania', 'Tatiana', 'Teea', 'Teodora',
        'Teodosia',
        'Teona', 'Tiberia', 'Timea', 'Tinca', 'Tincuța', 'Tudora', 'Tudorița', 'Tudosia', 'Valentina', 'Valeria',
        'Vanesa',
        'Varvara', 'Vasilica', 'Venera', 'Vera', 'Veronica', 'Veta', 'Vicenția', 'Victoria', 'Violeta', 'Viorela',
        'Viorica', 'Virginia', 'Viviana', 'Vlădelina', 'Voichița', 'Xenia', 'Zaharia', 'Zamfira', 'Zaraza', 'Zenobia',
        'Zenovia', 'Zina', 'Zoe')
    first_names_male = (
        'Achim', 'Adam', 'Adelin', 'Adi', 'Adonis', 'Adrian', 'Agnos', 'Albert', 'Aleodor', 'Alex', 'Alexandru',
        'Alexe', 'Alin', 'Alistar', 'Amedeu', 'Amza', 'Anatolie', 'Andrei', 'Andrian', 'Angel', 'Anghel', 'Antim',
        'Anton',
        'Antonie', 'Antoniu', 'Arian', 'Aristide', 'Arsenie', 'Augustin', 'Aurel', 'Aurelian', 'Aurică', 'Avram',
        'Axinte',
        'Barbu', 'Bartolomeu', 'Basarab', 'Bănel', 'Bebe', 'Beniamin', 'Benone', 'Bernard', 'Bogdan', 'Brăduț', 'Bucur',
        'Caius', 'Camil', 'Cantemir', 'Carol', 'Casian', 'Cazimir', 'Călin', 'Cătălin', 'Cedrin', 'Cezar', 'Ciprian',
        'Claudiu', 'Codin', 'Codrin', 'Codruț', 'Constantin', 'Cornel', 'Corneliu', 'Corvin', 'Cosmin', 'Costache',
        'Costel', 'Costin', 'Crin', 'Cristea', 'Cristian', 'Cristobal', 'Cristofor', 'Dacian', 'Damian', 'Dan',
        'Daniel',
        'Darius', 'David', 'Decebal', 'Denis', 'Dinu', 'Dominic', 'Dorel', 'Dorian', 'Dorin', 'Dorinel', 'Doru',
        'Dragoș',
        'Ducu', 'Dumitru', 'Edgar', 'Edmond', 'Eduard', 'Eftimie', 'Emanoil', 'Emanuel', 'Emanuil', 'Emil', 'Emilian',
        'Eremia', 'Eric', 'Ernest', 'Eugen', 'Eusebiu', 'Eustațiu', 'Fabian', 'Felix', 'Filip', 'Fiodor', 'Flaviu',
        'Florea', 'Florentin', 'Florian', 'Florin', 'Francisc', 'Frederic', 'Gabi', 'Gabriel', 'Gelu', 'George',
        'Georgel',
        'Georgian', 'Ghenadie', 'Gheorghe', 'Gheorghiță', 'Ghiță', 'Gică', 'Gicu', 'Giorgian', 'Grațian', 'Gregorian',
        'Grigore', 'Haralamb', 'Haralambie', 'Horațiu', 'Horea', 'Horia', 'Iacob', 'Iancu', 'Ianis', 'Ieremia',
        'Ilarie',
        'Ilarion', 'Ilie', 'Inocențiu', 'Ioan', 'Ion', 'Ionel', 'Ionică', 'Ionuț', 'Iosif', 'Irinel', 'Iulian', 'Iuliu',
        'Iurie', 'Iustin', 'Iustinian', 'Ivan', 'Jan', 'Jean', 'Jenel', 'Ladislau', 'Lascăr', 'Laurențiu', 'Laurian',
        'Lazăr', 'Leonard', 'Leontin', 'Leordean', 'Lică', 'Liviu', 'Lorin', 'Luca', 'Lucențiu', 'Lucian', 'Lucrețiu',
        'Ludovic', 'Manole', 'Marcel', 'Marcu', 'Marian', 'Marin', 'Marius', 'Martin', 'Matei', 'Maxim', 'Maximilian',
        'Mădălin', 'Mihai', 'Mihail', 'Mihnea', 'Mircea', 'Miron', 'Mitică', 'Mitruț', 'Mugur', 'Mugurel', 'Nae',
        'Narcis',
        'Nechifor', 'Nelu', 'Nichifor', 'Nicoară', 'Nicodim', 'Nicolae', 'Nicolaie', 'Nicu', 'Niculiță', 'Nicușor',
        'Nicuță', 'Norbert', 'Norman', 'Octav', 'Octavian', 'Octaviu', 'Olimpian', 'Olimpiu', 'Oliviu', 'Ovidiu',
        'Pamfil',
        'Panagachie', 'Panait', 'Paul', 'Pavel', 'Pătru', 'Petre', 'Petrică', 'Petrișor', 'Petru', 'Petruț', 'Pleșu',
        'Pompiliu', 'Radu', 'Rafael', 'Rareș', 'Raul', 'Răducu', 'Răzvan', 'Relu', 'Remus', 'Robert', 'Romeo',
        'Romulus',
        'Sabin', 'Sandu', 'Sandu', 'Sava', 'Sebastian', 'Sergiu', 'Sever', 'Severin', 'Silvian', 'Silviu', 'Simi',
        'Simion',
        'Sinică', 'Sorin', 'Stan', 'Stancu', 'Stelian', 'Șerban', 'Ștefan', 'Teodor', 'Teofil', 'Teohari', 'Theodor',
        'Tiberiu', 'Timotei', 'Titus', 'Todor', 'Toma', 'Traian', 'Tudor', 'Valentin', 'Valeriu', 'Valter', 'Vasile',
        'Vasilică', 'Veniamin', 'Vicențiu', 'Victor', 'Vincențiu', 'Viorel', 'Visarion', 'Vlad', 'Vladimir', 'Vlaicu',
        'Voicu', 'Zamfir', 'Zeno')
    first_names = first_names_female + first_names_male
    # sources: https://ro.wikipedia.org/wiki/Lista_celor_mai_uzuale_nume_de_familie#Rom%C3%A2nia
    last_names = (
        'Aanei', 'Ababei', 'Albu', 'Ardelean', 'Barbu', 'Cristea', 'Diaconescu', 'Diaconu', 'Dima', 'Dinu', 'Dobre',
        'Dochioiu', 'Dumitrescu', 'Eftimie', 'Ene', 'Florea', 'Georgescu', 'Gheorghiu', 'Ionescu', 'Ioniță',
        'Manole', 'Marin', 'Mazilescu', 'Mocanu', 'Nemeș', 'Nistor', 'Nistor', 'Niță', 'Oprea', 'Pop',
        'Popa', 'Popescu', 'Preda', 'Pușcașu', 'Stan', 'Stancu', 'Stoica', 'Stănescu', 'Suciu', 'Tabacu', 'Toma',
        'Tomescu', 'Tudor', 'Voinea')
| [
"tr3jer@gmail.com"
] | tr3jer@gmail.com |
40167e3cf9afb33c2cca023593a06ae0fd03b096 | 6809b17768d492866abf102318a5dcd2a03e8239 | /learnpythonthehardway/Binary-Search-Tree-to-Greater-Sum-Tree-5050.py | aa726b1e2475bad48db5c7abcd603bb4bcc3617d | [] | no_license | dgpllc/leetcode-python | 28428430a65d26d22f33d8c7e41089f6a783743a | 340ae58fb65b97aa6c6ab2daa8cbd82d1093deae | refs/heads/master | 2020-06-01T14:42:17.162579 | 2019-06-03T14:42:43 | 2019-06-03T14:42:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,450 | py | # Given the root of a binary search tree with distinct values, modify it so that every node has a new value equal to
# the sum of the values of the original tree that are greater than or equal to node.val.
#
# As a reminder, a binary search tree is a tree that satisfies these constraints:
#
# The left subtree of a node contains only nodes with keys less than the node's key.
# The right subtree of a node contains only nodes with keys greater than the node's key.
# Both the left and right subtrees must also be binary search trees.
#
#
# Example 1:
#
#
#
# Input: [4,1,6,0,2,5,7,null,null,null,3,null,null,null,8]
# Output: [30,36,21,36,35,26,15,null,null,null,33,null,null,null,8]
#
#
# Note:
#
# The number of nodes in the tree is between 1 and 100.
# Each node will have value between 0 and 100.
# The given tree is a binary search tree.
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def bstToGst(self, root):
        """
        Rewrite each BST node's value as the sum of all original values
        greater than or equal to it, and return the (mutated) root.

        Uses a reverse in-order traversal (right, node, left): nodes are
        visited in descending value order, so a running total of visited
        values is exactly the required "greater sum". This needs only
        O(height) extra space instead of materializing every node in a
        list, and avoids the Python-2-only ``xrange`` of the original.

        :type root: TreeNode
        :rtype: TreeNode
        """
        total = [0]  # mutable cell so the nested function can update it (py2-compatible)

        def visit(node):
            if not node:
                return
            visit(node.right)
            total[0] += node.val
            node.val = total[0]
            visit(node.left)

        visit(root)
        return root
| [
"xin_wei@intuit.com"
] | xin_wei@intuit.com |
522f0be76de3348a09f76d4c24dce5e68bc11941 | 3ccd609f68016aad24829b8dd3cdbb535fb0ff6d | /python/bpy/types/CyclesLightSettings.py | b90e2afb63612b6f9fcee2bf33dd98e67c9fb350 | [] | no_license | katharostech/blender_externs | 79b2eed064fd927e3555aced3e2eb8a45840508e | fdf7f019a460de0fe7e62375c1c94f7ab0e9f68d | refs/heads/master | 2020-04-11T14:00:29.393478 | 2018-10-01T00:40:51 | 2018-10-01T00:40:51 | 161,838,212 | 1 | 1 | null | 2018-12-14T20:41:32 | 2018-12-14T20:41:32 | null | UTF-8 | Python | false | false | 160 | py | class CyclesLightSettings:
cast_shadow = None
is_portal = None
max_bounces = None
samples = None
use_multiple_importance_sampling = None
| [
"troyedwardsjr@gmail.com"
] | troyedwardsjr@gmail.com |
66714611f024ba03584d28323952e6ecab755f56 | d5d94c992d0596080ba694c518dfdb58d3490847 | /0017/my_answer.py | d10ac9df9d6c0541772faa4ae39cc960e14d9e31 | [] | no_license | calgagi/leetcode | 1bf24b750e44c2c893935983e5d88e0f071d9f2d | 431aba979d92e331f2f92a07eb80167a823a49bd | refs/heads/master | 2022-11-17T11:26:01.596496 | 2020-07-19T06:56:04 | 2020-07-19T06:56:04 | 276,207,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 534 | py | class Solution:
def letterCombinations(self, digits: str) -> List[str]:
self.nums = ["", "", "abc", "def", "ghi", "jkl", "mno", "pqrs", "tuv", "wxyz"]
self.r = []
if len(digits) == 0:
return self.r
self.backtrack(digits, "")
return self.r
def backtrack(self, digits: str, curr: str) -> None:
if len(digits) == 0:
self.r.append(curr)
return
for c in self.nums[int(digits[0])]:
self.backtrack(digits[1:], curr + c)
| [
"calgagi@gmail.com"
] | calgagi@gmail.com |
406be3af63c84a41d0e928882a6e6c507a54bd59 | 3b1242e49b72ecc5d7e80c2a0d833a5876affd29 | /src/crs_talker.py | 3f79150be2dedc84a0739e3acfd4766f845a4347 | [] | no_license | raysmith619/queen_bee_net | 09a29ffa11b9c12cc53506d9b7b7c3d7bad35854 | 6aaf8bf539271d57cb501d58a3ff41eabc0e89c7 | refs/heads/main | 2023-01-08T05:00:25.755818 | 2020-11-03T22:42:39 | 2020-11-03T22:42:39 | 303,788,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,631 | py | # crs_talker.py 06Oct2020 crs, Paired down from crs_drone_bee, drone_bee
"""
UDP - chat client (drone bee)
Usage: <pgm> [hostname/ip]
default: localhost
"our_name": uses gethostname()
Uses tkinter to graphically display talker with
messages from other talkers
Sends messages to host
"""
import socket
import sys
import os
import tkinter as tk
port = 12345 # com port
sock_timeout = 5
print(sys.argv)
pgm = sys.argv[0]
our_name = socket.gethostname()
our_ip = socket.gethostbyname(our_name)
print(f"{pgm}\n run on {our_name} {our_ip}")
qbinfo = "qb.txt" # Get host, info from file
if os.path.exists(qbinfo):
with open(qbinfo) as qbif:
qbinfo_line = qbif.read()
host, port = qbinfo_line.split()
port = int(port)
if len(sys.argv) > 1:
host_name = sys.argv[1]
if host_name == "our_name":
host_name = our_name # Used internal name
host = host_name
if len(sys.argv) > 2:
port = sys.argv[2]
port = int(port)
host_ip = socket.gethostbyname(host)
print(f"host:{host} IP: {host_ip} port: {port}")
family = socket.AF_INET
sock_type = socket.SOCK_DGRAM
sock = socket.socket(family, sock_type)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(sock_timeout)
print("Press the space key to Enter msg")
def print_if_msg():
    """Poll the socket briefly and print a pending chat message, if any.

    Temporarily shortens the receive timeout so the caller is not blocked
    when no datagram is waiting; the normal timeout is always restored.
    """
    ck_time = .01
    try:
        sock.settimeout(ck_time)
        rep_data, rep_addr = sock.recvfrom(1024)
        rep_msg = rep_data.decode("utf-8")
        # Messages arrive as "<sender> <text>"; split off the sender token.
        from_loc, msg = rep_msg.split(maxsplit=1)
        print(f"{from_loc}({rep_addr}): {msg}")
    except socket.timeout:
        # No datagram arrived within ck_time: that is the normal "no
        # message" case, so stay silent instead of letting the timeout
        # exception escape (the original try/finally had no except and
        # would propagate it, defeating the "if msg" intent).
        pass
    finally:
        # Restore the long timeout used by the send/reply path.
        sock.settimeout(sock_timeout)
mw = tk.Tk()
msg_entry = None # Set when ready
def msg_proc():
    """Send the entry widget's current text to the server over UDP and
    print the server's reply (or a timeout notice)."""
    print("entry change")
    msg = msg_entry.get()  # BUG FIX: was `msg_entry.get` (the bound method, not the text)
    data = bytes(msg, "utf-8")
    sock.sendto(data, (host, port)) # Sending message to UDP server
    sock.settimeout(sock_timeout)
    try:
        rep_data, rep_addr = sock.recvfrom(1024)
        rep_msg = rep_data.decode("utf-8")
        print(f"\tReply:{rep_msg} from {rep_addr}")
    except OSError:
        # socket.timeout (and other socket errors) -- narrowed from a bare
        # `except:` which would also swallow KeyboardInterrupt.
        print(f"\tNo reply after {sock_timeout} seconds")
# Build the chat window: a labelled entry row for outgoing text plus a
# display area for messages from other talkers.
ml_frame = tk.Frame(mw)
ml_frame.pack(side=tk.TOP)
ml_label = tk.Label(ml_frame, text="Enter Message")
ml_label.pack(side = tk.LEFT)
# NOTE(review): the Send button has no command= callback wired up, so
# msg_proc() is never invoked from the GUI -- confirm intended.
ml_send_btn = tk.Button(ml_frame, text="Send")
ml_send_btn.pack(side=tk.RIGHT)
msg_entry = tk.Entry(mw, bd =5)
msg_entry.pack(side = tk.TOP)
msg_label = tk.Label(mw, text="From Others")
msg_label.pack(side = tk.TOP)
# NOTE(review): msg_entry is created twice; this second widget shadows the
# first, which remains packed but unreachable by name.
msg_entry = tk.Entry(mw, bd=5)
msg_entry.pack(side=tk.TOP)
tk.mainloop()
| [
"noreply@github.com"
] | raysmith619.noreply@github.com |
c916994fb5b664d9fc25366000d06bc6efbd8769 | 0f2b08b31fab269c77d4b14240b8746a3ba17d5e | /onnxruntime/python/tools/quantization/quant_utils.py | 4d5bcca29618fc4ec2c75b8ca10dedf4147d693f | [
"MIT"
] | permissive | microsoft/onnxruntime | f75aa499496f4d0a07ab68ffa589d06f83b7db1d | 5e747071be882efd6b54d7a7421042e68dcd6aff | refs/heads/main | 2023-09-04T03:14:50.888927 | 2023-09-02T07:16:28 | 2023-09-02T07:16:28 | 156,939,672 | 9,912 | 2,451 | MIT | 2023-09-14T21:22:46 | 2018-11-10T02:22:53 | C++ | UTF-8 | Python | false | false | 22,956 | py | import logging
import os
import tempfile
from enum import Enum
from pathlib import Path
import numpy
import onnx
from onnx import ModelProto, TensorProto, external_data_helper
from onnx import onnx_pb as onnx_proto
from onnx.helper import make_graph, make_model, make_node, make_tensor_value_info
from onnx.reference import ReferenceEvaluator
from onnxruntime import GraphOptimizationLevel, InferenceSession, SessionOptions
try:
from onnx.reference.custom_element_types import float8e4m3fn
except ImportError:
float8e4m3fn = None
__producer__ = "onnx.quantize"
__version__ = "0.1.0"
onnx_domain = "ai.onnx"
ms_domain = "com.microsoft"
QUANT_OP_NAME = "QuantizeLinear"
QUANT_INPUT_SUFFIX = "_QuantizeLinear_Input"
DEQUANT_OP_NAME = "DequantizeLinear"
DEQUANT_OUTPUT_SUFFIX = "_DequantizeLinear_Output"
TENSOR_NAME_QUANT_SUFFIX = "_quantized"
FLOAT8_DISTRIBUTIONS = {}
type_to_name = {getattr(TensorProto, k): k for k in dir(TensorProto) if isinstance(getattr(TensorProto, k), int)}
# Quantization mode
# IntegerOps: Use IntegerOps in quantized model. Only ConvInteger and MatMulInteger ops are supported now.
# QLinearOps: Use QLinearOps in quantized model. Only QLinearConv and QLinearMatMul ops are supported now.
class QuantizationMode(Enum):
    """How quantized ops are emitted: integer ops (ConvInteger/MatMulInteger)
    or QLinear ops (QLinearConv/QLinearMatMul)."""

    IntegerOps = 0
    QLinearOps = 1

    def __str__(self):
        return self.name

    @staticmethod
    def from_string(mode):
        # Name-based lookup; unknown names surface as ValueError for callers.
        members = QuantizationMode.__members__
        if mode not in members:
            raise ValueError()
        return members[mode]
class QuantizedValueType(Enum):
    """Origin of a quantized tensor: a graph input or a constant initializer."""

    Input = 0
    Initializer = 1

    def __str__(self):
        return self.name

    @staticmethod
    def from_string(v):
        # Name-based lookup; unknown names surface as ValueError for callers.
        members = QuantizedValueType.__members__
        if v not in members:
            raise ValueError()
        return members[v]
class QuantType(Enum):
    """Quantization element types supported by the tooling."""

    QInt8 = 0
    QUInt8 = 1
    QFLOAT8E4M3FN = 2

    def __str__(self):
        return self.name

    @staticmethod
    def from_string(t):
        # Name-based lookup; unknown names surface as ValueError for callers.
        members = QuantType.__members__
        if t not in members:
            raise ValueError()
        return members[t]

    @property
    def tensor_type(self):
        # Map the enum member onto the matching onnx TensorProto element type.
        if self is QuantType.QInt8:
            return TensorProto.INT8
        if self is QuantType.QUInt8:
            return TensorProto.UINT8
        if self is QuantType.QFLOAT8E4M3FN:
            return TensorProto.FLOAT8E4M3FN
        raise ValueError(f"Unexpected value qtype={self!r}.")
class QuantFormat(Enum):
    """Output style of the quantized graph: fused QOperator nodes or
    explicit QuantizeLinear/DequantizeLinear (QDQ) pairs."""

    QOperator = 0
    QDQ = 1

    def __str__(self):
        return self.name

    @staticmethod
    def from_string(format):
        # Name-based lookup; unknown names surface as ValueError for callers.
        members = QuantFormat.__members__
        if format not in members:
            raise ValueError()
        return members[format]
ONNX_TYPE_TO_NP_TYPE = {
onnx_proto.TensorProto.INT8: numpy.dtype("int8"),
onnx_proto.TensorProto.UINT8: numpy.dtype("uint8"),
onnx_proto.TensorProto.FLOAT8E4M3FN: float8e4m3fn,
}
def quantize_nparray(qType, arr, scale, zero_point, low=None, high=None):
    """Quantize a float array to the requested ONNX element type.

    For float-8 types the conversion is delegated to an onnx reference
    evaluation of QuantizeLinear (zero_point must be 0). For INT8/UINT8 the
    affine formula round(arr / scale) + zero_point is applied and clipped to
    the type's natural range, further narrowed by `low`/`high` if given.

    :param qType: target onnx element type (a key of ONNX_TYPE_TO_NP_TYPE)
    :param arr: numpy array of real values
    :param scale: quantization scale (array-like; used via .astype for float 8)
    :param zero_point: quantization zero point (must be 0 for float 8)
    :param low: optional extra lower clip bound (int8/uint8 path only)
    :param high: optional extra upper clip bound (int8/uint8 path only)
    :return: numpy array of quantized values
    """
    assert qType in ONNX_TYPE_TO_NP_TYPE, f"Unexpected data type {qType} requested. Only INT8 and UINT8 are supported."
    if qType in (
        onnx_proto.TensorProto.FLOAT8E4M3FN,
        onnx_proto.TensorProto.FLOAT8E4M3FNUZ,
        onnx_proto.TensorProto.FLOAT8E5M2,
        onnx_proto.TensorProto.FLOAT8E5M2FNUZ,
    ):
        if zero_point != 0:
            raise NotImplementedError(f"zero_point is expected to be null for float 8 not {zero_point!r}.")
        # Build a one-node QuantizeLinear model and run it with the onnx
        # reference evaluator so float-8 rounding exactly matches the spec.
        onnx_model = make_model(
            make_graph(
                [
                    make_node(
                        "Constant", [], ["zero_point"], value=onnx.helper.make_tensor("zero_point", qType, [], [0])
                    ),
                    make_node("QuantizeLinear", ["X", "scale", "zero_point"], ["Y"]),
                ],
                "qu",
                [
                    make_tensor_value_info("X", TensorProto.FLOAT, None),
                    make_tensor_value_info("scale", TensorProto.FLOAT, None),
                ],
                [make_tensor_value_info("Y", qType, None)],
            )
        )
        ref = ReferenceEvaluator(onnx_model)
        return ref.run(None, {"X": arr.astype(numpy.float32), "scale": scale.astype(numpy.float32)})[0]
    else:
        dtype = ONNX_TYPE_TO_NP_TYPE[qType]
        # Clip bounds: type's natural range intersected with caller's low/high.
        cliplow = max(0 if dtype == numpy.uint8 else -127, -127 if low is None else low)
        cliphigh = min(255 if dtype == numpy.uint8 else 127, 255 if high is None else high)
        arr_fp32 = numpy.asarray((arr.astype(numpy.float32) / scale).round() + zero_point)
        numpy.clip(arr_fp32, cliplow, cliphigh, out=arr_fp32)
        return arr_fp32.astype(dtype)
def compute_scale_zp(rmin, rmax, qmin, qmax, symmetric=False):
    """Derive the affine quantization parameters for r = s * (q - z).

    Every real value in [rmin, rmax] gets an approximate representation in
    [qmin, qmax], and qmin <= z <= qmax always holds. With symmetric=True
    the real range is first widened to [-absmax, +absmax] where
    absmax = max(abs(rmin), abs(rmax)).

    :parameter rmin: minimum value of r
    :parameter rmax: maximum value of r
    :parameter qmin: minimum value representable by the target quantization data type
    :parameter qmax: maximum value representable by the target quantization data type
    :return: zero and scale [z, s]
    """
    if qmin > 0 or qmax < 0:
        raise ValueError(f"qmin and qmax must meet requirement: qmin <= 0 <= qmax while qmin:{qmin}, qmmax:{qmax}")

    # Widen the real range so 0.0 is always exactly representable (required
    # for the zero point to satisfy qmin <= z <= qmax).
    lo = min(rmin, 0)
    hi = max(rmax, 0)

    if symmetric:
        bound = max(abs(lo), abs(hi))
        lo, hi = -bound, bound

    scale = (hi - lo) / float(qmax - qmin)
    if scale < numpy.finfo(numpy.float32).tiny:
        # Degenerate (all-zero) range: fall back to identity scaling.
        return [0, 1.0]

    return [round(qmin - lo / scale), scale]
def compute_scale_zp_float8(element_type, std):
    """Calculate the scale s for a float8 type (E4M3FN).
    The function assumes the coefficient distribution and the float 8
    distribution are similar to two gaussian laws.
    :return: zero and scale [z, s]
    More details in notebook `quantization_fp8.ipynb
    <https://github.com/microsoft/onnxruntime/blob/main/docs/python/notebooks/quantization_fp8.ipynb>`_.
    """
    # Lazily build and cache (in module-global FLOAT8_DISTRIBUTIONS) the set
    # of finite values representable by this float8 type.
    if element_type not in FLOAT8_DISTRIBUTIONS:
        if element_type == TensorProto.FLOAT8E4M3FN:
            from onnx.numpy_helper import float8e4m3_to_float32

            # All 256 bit patterns, minus NaN/Inf encodings.
            all_values = [float8e4m3_to_float32(i) for i in range(0, 256)]
            values = numpy.array(
                [f for f in all_values if not numpy.isnan(f) and not numpy.isinf(f)], dtype=numpy.float32
            )
        else:
            raise ValueError(f"Quantization to element_type={element_type} not implemented.")
        FLOAT8_DISTRIBUTIONS[element_type] = values
    # Match the standard deviations of the two (assumed gaussian) distributions.
    std_f8 = numpy.std(FLOAT8_DISTRIBUTIONS[element_type])
    zero = 0
    scale = std / std_f8
    return [zero, scale]
def quantize_data(data, qType, symmetric, reduce_range=False):
    """
    :param data: data to quantize
    :param qType: data type to quantize to. Supported types UINT8 and INT8
    :param symmetric: whether symmetric quantization is used or not. This is applied to INT8.
    :return: minimum, maximum, zero point, scale, and quantized weights
    To pack weights, we compute a linear transformation
    - when data `type == uint8` mode, from `[rmin, rmax]` -> :math:`[0, 2^{b-1}]` and
    - when data `type == int8`, from `[-m , m]` -> :math:`[-(2^{b-1}-1), 2^{b-1}-1]` where
        `m = max(abs(rmin), abs(rmax))`
    and add necessary intermediate nodes to trasnform quantized weight to full weight using the equation
    :math:`r = S(q-z)`, where
    - *r*: real original value
    - *q*: quantized value
    - *S*: scale
    - *z*: zero point
    """
    # Defaults used when `data` is empty: identity scaling, zero range.
    rmin = 0
    rmax = 0
    zero_point = 0
    scale = 1.0
    if len(data):
        rmin = min(data)
        rmax = max(data)
    if qType == TensorProto.FLOAT8E4M3FN:
        if reduce_range:
            raise RuntimeError("Unsupported option reduce_range=True for float 8.")
        # Float-8 scale is derived from the data's standard deviation.
        std = numpy.std(data)
        zero_point, scale = compute_scale_zp_float8(qType, std)
        quantized_data = quantize_nparray(qType, numpy.asarray(data), scale, zero_point)
        # Bit pattern x & 127 == 127 encodes NaN in E4M3FN; any such result
        # means the scale overflowed a value, so fail loudly.
        if any((quantized_data.astype(numpy.uint8).ravel() & 127) == 127):
            np_data = numpy.asarray(data)
            raise RuntimeError(
                f"One of the quantized value is NaN data in [{np_data.min()}, {np_data.max()}], "
                f"quantized_data in [{quantized_data.min()}, {quantized_data.max()}]."
            )
        return rmin, rmax, zero_point, scale, quantized_data
    if qType in (TensorProto.INT8, TensorProto.UINT8):
        if len(data):
            # Affine parameters from the data range and the type's q-range.
            qmin, qmax = get_qmin_qmax_for_qType(qType, reduce_range, symmetric=symmetric)
            zero_point, scale = compute_scale_zp(rmin, rmax, qmin, qmax, symmetric)
        quantized_data = quantize_nparray(qType, numpy.asarray(data), scale, zero_point)
        return rmin, rmax, zero_point, scale, quantized_data
    raise ValueError(f"Unexpected value for qType={qType}.")
def get_qmin_qmax_for_qType(qType, reduce_range=False, symmetric=False):  # noqa: N802
    """
    Return qmin and qmax, the minimum and maximum value representable by the given qType
    :parameter qType: onnx.onnx_pb.TensorProto.UINT8 or onnx.onnx_pb.TensorProto.UINT8
    :return: qmin, qmax
    """
    if qType == onnx_proto.TensorProto.UINT8:
        return (0, 127) if reduce_range else (0, 255)
    if qType == onnx_proto.TensorProto.INT8:
        # reduce_range halves the range regardless of symmetry; otherwise
        # symmetric mode drops -128 so the range is balanced around zero.
        if reduce_range:
            return (-64, 64)
        return (-127, 127) if symmetric else (-128, 127)
    if qType == onnx_proto.TensorProto.FLOAT8E4M3FN:
        raise NotImplementedError("This function is not implemented for float 8 as not needed.")
    raise ValueError(f"Unexpected data type {qType} requested. Only INT8 and UINT8 are supported.")
def get_qrange_for_qType(qType, reduce_range=False, symmetric=False):  # noqa: N802
    """
    Helper function to get the quantization range for a type.
    parameter qType: quantization type.
    return: quantization range.
    """
    # Width of the representable integer interval for this type.
    lo, hi = get_qmin_qmax_for_qType(qType, reduce_range, symmetric=symmetric)
    return hi - lo
class QuantizedInitializer:
    """
    Represents a linearly quantized weight input from ONNX operators
    """

    def __init__(
        self,
        name,
        initializer,
        rmins,
        rmaxs,
        zero_points,
        scales,
        data=None,
        quantized_data=None,
        axis=None,
    ):
        self.name = name
        self.initializer = initializer  # TensorProto initializer in ONNX graph
        self.rmins = rmins  # List of minimum range for each axis
        self.rmaxs = rmaxs  # List of maximum range for each axis
        # 1D tensor of zero points computed for each axis. scalar if axis is empty
        self.zero_points = zero_points
        self.scales = scales  # 1D tensor of scales computed for each axis. scalar if axis is empty
        # BUG FIX: the defaults were shared mutable lists (`data=[]`, noqa'd
        # B006); mutating one instance's list mutated every other instance
        # using the default. Use None and materialize a fresh list each time.
        self.data = [] if data is None else data  # original data from initializer TensorProto
        self.quantized_data = [] if quantized_data is None else quantized_data  # weight-packed data from data
        # Scalar to specify which dimension in the initializer to weight pack.
        # If None, a single zero point and scale are computed from a single rmin and rmax.
        self.axis = axis
class QuantizedValue:
    """
    Represents a linearly quantized value (input / output / initializer),
    recording the names of the quantized tensor and its scale and zero-point
    tensors so later passes can look them up.
    """

    def __init__(
        self,
        name,
        new_quantized_name,
        scale_name,
        zero_point_name,
        quantized_value_type,
        axis=None,
        node_type=None,
        node_qtype=None,
    ):
        self.original_name = name  # tensor name before quantization
        self.q_name = new_quantized_name  # name of the quantized tensor
        self.scale_name = scale_name  # name of the scale tensor
        self.zp_name = zero_point_name  # name of the zero-point tensor
        self.value_type = quantized_value_type  # QuantizedValueType member
        self.axis = axis  # per-channel axis, or None for per-tensor
        self.node_type = node_type  # op type of the producing node, if tracked
        self.node_qtype = node_qtype  # quantized element type used by that node
class BiasToQuantize:
    """
    Represents a bias to be quantized, together with the input and weight
    tensor names whose scales determine the bias scale.
    """

    def __init__(self, bias_name, input_name, weight_name):
        self.bias_name = bias_name
        self.input_name = input_name
        self.weight_name = weight_name
def attribute_to_kwarg(attribute):
    """
    Convert attribute to kwarg format for use with onnx.helper.make_node.
    :parameter attribute: attribute in AttributeProto format.
    :return: attribute in {key: value} format.
    :raises ValueError: if the attribute has no type (0) or an unsupported type.
    """
    if attribute.type == 0:
        raise ValueError(f"attribute {attribute.name} does not have type specified.")

    # AttributeProto.type -> name of the proto field holding the payload, per
    # the AttributeType enum in https://github.com/onnx/onnx/blob/main/onnx/onnx.proto
    # (replaces the original 10-branch if/elif chain with a table lookup).
    field_by_type = {
        1: "f",
        2: "i",
        3: "s",
        4: "t",
        5: "g",
        6: "floats",
        7: "ints",
        8: "strings",
        9: "tensors",
        10: "graphs",
    }
    field = field_by_type.get(attribute.type)
    if field is None:
        raise ValueError(f"attribute {attribute.name} has unsupported type {attribute.type}.")
    return {attribute.name: getattr(attribute, field)}
def find_by_name(item_name, item_list):
    """
    Helper function to find item by name in a list.
    parameter item_name: name of the item.
    parameter item_list: list of items.
    return: item if found. None otherwise.
    """
    # Stop at the first match instead of materializing a list of all matches.
    return next((item for item in item_list if item.name == item_name), None)
def get_elem_index(elem_name, elem_list):
    """
    Helper function to return index of an item in a node list
    (the last matching index when duplicates exist; -1 if absent).
    """
    # Scan from the end: equivalent to the original forward scan that kept
    # overwriting the result, i.e. last-match-wins.
    for idx in reversed(range(len(elem_list))):
        if elem_list[idx] == elem_name:
            return idx
    return -1
def get_mul_node(inputs, output, name):
    """
    Helper function to create a Mul node.
    parameter inputs: list of input names.
    parameter output: output name.
    parameter name: name of the node.
    return: Mul node in NodeProto format.
    """
    # Thin wrapper over onnx.helper.make_node for the common Mul case.
    return onnx.helper.make_node("Mul", inputs, [output], name)
def generate_identified_filename(filename: Path, identifier: str) -> Path:
    """
    Helper function to generate a identifiable filepath by concatenating the given identifier as a suffix.
    """
    # Insert the identifier between the stem and the (final) suffix,
    # keeping the original directory.
    new_name = f"{filename.stem}{identifier}{filename.suffix}"
    return filename.parent.joinpath(new_name)
def apply_plot(hist, hist_edges):
    """Dump a histogram to stdout and show it with matplotlib.

    Debug/interactive helper for calibration: prints the full (untruncated)
    histogram arrays and opens a blocking matplotlib window.
    """
    import sys

    import matplotlib.pyplot as plt
    import numpy

    # Print arrays in full rather than numpy's elided summary form.
    numpy.set_printoptions(threshold=sys.maxsize)
    print("Histogram:")
    print(hist)
    print("Histogram Edges:")
    print(hist_edges)
    plt.stairs(hist, hist_edges, fill=True)
    plt.xlabel("Tensor value")
    plt.ylabel("Counts")
    plt.title("Tensor value V.S. Counts")
    plt.show()
def write_calibration_table(calibration_cache):
    """
    Helper function to write calibration table to files.

    Writes three sibling files in the current working directory:
    - calibration.json        : the raw {tensor_name: (min, max)} cache
    - calibration.flatbuffers : TensorRT-style table of {name: absmax} strings
    - calibration.cache       : the same {name: absmax} pairs as plain text
    """
    import json

    import flatbuffers

    import onnxruntime.quantization.CalTableFlatBuffers.KeyValue as KeyValue
    import onnxruntime.quantization.CalTableFlatBuffers.TrtTable as TrtTable

    logging.info(f"calibration cache: {calibration_cache}")

    with open("calibration.json", "w") as file:
        file.write(json.dumps(calibration_cache))  # use `json.loads` to do the reverse

    # Serialize data using FlatBuffers
    builder = flatbuffers.Builder(1024)
    key_value_list = []
    for key in sorted(calibration_cache.keys()):
        values = calibration_cache[key]
        # Symmetric calibration: keep only the largest absolute bound.
        value = str(max(abs(values[0]), abs(values[1])))

        flat_key = builder.CreateString(key)
        flat_value = builder.CreateString(value)

        KeyValue.KeyValueStart(builder)
        KeyValue.KeyValueAddKey(builder, flat_key)
        KeyValue.KeyValueAddValue(builder, flat_value)
        key_value = KeyValue.KeyValueEnd(builder)

        key_value_list.append(key_value)

    # FlatBuffers vectors are built by prepending in reverse.
    TrtTable.TrtTableStartDictVector(builder, len(key_value_list))
    for key_value in key_value_list:
        builder.PrependUOffsetTRelative(key_value)
    main_dict = builder.EndVector()

    TrtTable.TrtTableStart(builder)
    TrtTable.TrtTableAddDict(builder, main_dict)
    cal_table = TrtTable.TrtTableEnd(builder)

    builder.Finish(cal_table)
    buf = builder.Output()

    with open("calibration.flatbuffers", "wb") as file:
        file.write(buf)

    # Deserialize data (for validation) -- only under QUANTIZATION_DEBUG.
    if os.environ.get("QUANTIZATION_DEBUG", 0) in (1, "1"):
        cal_table = TrtTable.TrtTable.GetRootAsTrtTable(buf, 0)
        dict_len = cal_table.DictLength()
        for i in range(dict_len):
            key_value = cal_table.Dict(i)
            logging.info(key_value.Key())
            logging.info(key_value.Value())

    # write plain text
    with open("calibration.cache", "w") as file:
        for key in sorted(calibration_cache.keys()):
            value = calibration_cache[key]
            s = key + " " + str(max(abs(value[0]), abs(value[1])))
            file.write(s)
            file.write("\n")
def smooth_distribution(p, eps=0.0001):
    """Given a discrete distribution (may have not been normalized to 1),
    smooth it by replacing zeros with eps multiplied by a scaling factor
    and taking the corresponding amount off the non-zero values.
    Ref: http://web.engr.illinois.edu/~hanj/cs412/bk3/KL-divergence.pdf
         https://github.com//apache/incubator-mxnet/blob/master/python/mxnet/contrib/quantization.py
    """
    zero_mask = (p == 0).astype(numpy.float32)
    nonzero_mask = (p != 0).astype(numpy.float32)
    n_zeros = zero_mask.sum()
    n_nonzeros = p.size - n_zeros

    if not n_nonzeros:
        # Degenerate all-zero histogram: signal failure instead of raising.
        return -1

    # Mass moved onto each zero bin is taken evenly from the non-zero bins.
    eps1 = eps * float(n_zeros) / float(n_nonzeros)
    assert eps1 < 1.0, "n_zeros=%d, n_nonzeros=%d, eps1=%f" % (
        n_zeros,
        n_nonzeros,
        eps1,
    )

    hist = p.astype(numpy.float32)
    hist += eps * zero_mask + (-eps1) * nonzero_mask
    assert (hist <= 0).sum() == 0
    return hist
def model_has_external_data(model_path: Path):
    """Return True if any initializer of the model stores its tensor data
    in an external file (loads only the model structure, not the payloads)."""
    model = onnx.load(model_path.as_posix(), load_external_data=False)
    for intializer in model.graph.initializer:
        if external_data_helper.uses_external_data(intializer):
            return True
    return False
def optimize_model(model_path: Path, opt_model_path: Path):
    """
    Generate model that applies graph optimization (constant folding, etc.)
    parameter model_path: path to the original onnx model
    parameter opt_model_path: path to the optimized onnx model
    :return: optimized onnx model
    """
    sess_option = SessionOptions()
    sess_option.optimized_model_filepath = opt_model_path.as_posix()
    sess_option.graph_optimization_level = GraphOptimizationLevel.ORT_ENABLE_BASIC
    kwargs = {}
    # This will rename constant initializer names, disable it to make test pass.
    kwargs["disabled_optimizers"] = ["ConstantSharing"]
    # Creating the session runs the optimizer; the optimized model is written
    # to sess_option.optimized_model_filepath as a side effect.
    _ = InferenceSession(model_path.as_posix(), sess_option, providers=["CPUExecutionProvider"], **kwargs)
def add_pre_process_metadata(model: ModelProto):
    """Tag *model* as having gone through quantization pre-processing.

    Existing metadata entries are preserved; set_model_props replaces the
    whole metadata_props collection, so they are merged in first.
    """
    props = {"onnx.quant.pre_process": "onnxruntime.quant"}
    for existing in model.metadata_props:
        props[existing.key] = existing.value
    onnx.helper.set_model_props(model, props)
def model_has_pre_process_metadata(model: ModelProto) -> bool:
    """Return True when *model* carries the quantization pre-processing tag."""
    return any(
        prop.key == "onnx.quant.pre_process" and prop.value == "onnxruntime.quant"
        for prop in model.metadata_props
    )
def add_infer_metadata(model: ModelProto):
    """Tag *model* as having been through shape inference.

    Existing metadata entries are merged in because set_model_props
    overwrites the whole metadata_props collection.
    """
    props = {"onnx.infer": "onnxruntime.quant"}
    for existing in model.metadata_props:
        props[existing.key] = existing.value
    onnx.helper.set_model_props(model, props)
def model_has_infer_metadata(model: ModelProto) -> bool:
    """Return True when *model* carries the shape-inference tag."""
    return any(
        prop.key == "onnx.infer" and prop.value == "onnxruntime.quant"
        for prop in model.metadata_props
    )
def load_model_with_shape_infer(model_path: Path) -> ModelProto:
    """Load the ONNX model at *model_path* after running shape inference.

    Inference is done path-to-path (infer_shapes_path) so models too large
    to hold inferred in memory are handled; the temporary "-inferred" file
    is deleted once the model has been read back.
    """
    inferred_path = generate_identified_filename(model_path, "-inferred")
    onnx.shape_inference.infer_shapes_path(str(model_path), str(inferred_path))
    loaded = onnx.load(inferred_path.as_posix())
    add_infer_metadata(loaded)
    inferred_path.unlink()
    return loaded
def save_and_reload_model_with_shape_infer(model: ModelProto) -> ModelProto:
    """Round-trip an in-memory *model* through disk to run shape inference."""
    with tempfile.TemporaryDirectory(prefix="ort.quant.") as tmp_dir:
        tmp_model_path = Path(tmp_dir).joinpath("model.onnx")
        # Externalize tensor payloads so models over the 2 GB protobuf
        # limit can still be serialized.
        onnx.save_model(model, tmp_model_path.as_posix(), save_as_external_data=True)
        return load_model_with_shape_infer(tmp_model_path)
def tensor_proto_to_array(initializer: TensorProto) -> numpy.ndarray:
    """Convert a float TensorProto *initializer* to a numpy array.

    Raises ValueError for any non-float tensor, naming the offending weight.
    """
    if initializer.data_type != onnx_proto.TensorProto.FLOAT:
        raise ValueError(
            f"Only float type is supported. Weights {initializer.name} is {type_to_name[initializer.data_type]}"
        )
    return onnx.numpy_helper.to_array(initializer)
def add_quant_suffix(tensor_name: str) -> str:
    """Name for the QuantizeLinear node attached to *tensor_name*."""
    return f"{tensor_name}_QuantizeLinear"
def add_quant_input_suffix(tensor_name: str) -> str:
    """Name for the input tensor of a QuantizeLinear node.

    Uses the module-level QUANT_INPUT_SUFFIX constant (defined elsewhere
    in this file) so producers and consumers of the name agree.
    """
    return tensor_name + QUANT_INPUT_SUFFIX
def add_quant_output_suffix(tensor_name) -> str:
    """Name for the output tensor of the QuantizeLinear node for *tensor_name*."""
    return f"{tensor_name}_QuantizeLinear_Output"
def add_dequant_suffix(tensor_name) -> str:
    """Name for the DequantizeLinear node attached to *tensor_name*."""
    return f"{tensor_name}_DequantizeLinear"
def add_dequant_input_suffix(tensor_name) -> str:
    """Name for the input tensor of the DequantizeLinear node for *tensor_name*."""
    return f"{tensor_name}_DequantizeLinear_Input"
def add_dequant_output_suffix(tensor_name) -> str:
    """Name for the output tensor of a DequantizeLinear node.

    Uses the module-level DEQUANT_OUTPUT_SUFFIX constant (defined elsewhere
    in this file) so producers and consumers of the name agree.
    """
    return tensor_name + DEQUANT_OUTPUT_SUFFIX
| [
"noreply@github.com"
] | microsoft.noreply@github.com |
2e1cf3b0394156d2a894a0b65fc8e26942c7dcd1 | 58e0f9966c896e9559486e104246cb7b87814d4f | /server/src/uds/transports/SPICE/scripts/macosx/tunnel.py | fd402709740044d7ee414e9d2f500f3b9c6ca17c | [] | no_license | hanwoody/openuds | 53d02d9274663f05b2fbba532d5cd5ba0e63d229 | 6868e471c564b7153e0787fd9dcf67afa489472f | refs/heads/master | 2023-01-14T16:10:33.121913 | 2020-11-20T13:40:18 | 2020-11-20T13:40:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,844 | py | # This is a template
# Saved as .py for easier editing
from __future__ import unicode_literals
# pylint: disable=import-error, no-name-in-module, undefined-variable
import os
import subprocess
from uds import tools # @UnresolvedImport
from uds.forward import forward # @UnresolvedImport
remoteViewer = '/Applications/RemoteViewer.app/Contents/MacOS/RemoteViewer'
if not os.path.isfile(remoteViewer):
raise Exception('''<p>You need to have installed virt-viewer to connect to this UDS service.</p>
<p>
Please, install appropriate package for your system.
</p>
<p>
<a href="http://people.freedesktop.org/~teuf/spice-gtk-osx/dmg/0.5.7/RemoteViewer-0.5.7-1.dmg">Open download page</a>
</p>
<p>
Please, note that in order to UDS Connector to work correctly, you must copy the Remote Viewer app to your Applications Folder.<br/>
Also remember, that in order to allow this app to run on your system, you must open it one time once it is copied to your App folder
</p>
''')
# 'sp' is not defined in this file: per the header, this is a template, so
# 'sp' is presumably injected with the connection parameters (ip, ports,
# tunnel host/credentials, .vv file templates) when the script is rendered
# -- TODO confirm against the UDS transport that renders it.
theFile = sp['as_file_ns']  # default: the non-secure connection file template
if sp['port'] != '-1':
    # Open an SSH forward for the plain SPICE port; 'port' becomes the
    # local end of the tunnel.
    forwardThread1, port = forward(sp['tunHost'], sp['tunPort'], sp['tunUser'], sp['tunPass'], sp['ip'], sp['port'])
    if forwardThread1.status == 2:
        raise Exception('Unable to open tunnel')
else:
    port = -1  # sentinel: no plain port requested
if sp['secure_port'] != '-1':
    # A TLS port is requested: switch to the secure connection file template.
    theFile = sp['as_file']
    if port != -1:
        # Reuse the already-open tunnel connection for the second forward.
        forwardThread2, secure_port = forwardThread1.clone(sp['ip'], sp['secure_port'])
    else:
        forwardThread2, secure_port = forward(sp['tunHost'], sp['tunPort'], sp['tunUser'], sp['tunPass'], sp['ip'], sp['secure_port'])
    if forwardThread2.status == 2:
        raise Exception('Unable to open tunnel')
else:
    secure_port = -1  # sentinel: no secure port requested
# Fill the chosen template with the local tunnel endpoints.
theFile = theFile.format(
    secure_port=secure_port,
    port=port
)
# Write the rendered .vv file to a temp location and hand it to RemoteViewer.
filename = tools.saveTempFile(theFile)
subprocess.Popen([remoteViewer, filename])
| [
"dkmaster@dkmon.com"
] | dkmaster@dkmon.com |
03a630af197f2b420553a89fa48f12c265d3e2c6 | a41e1498e3c080f47abd8e8e57157548df3ebbf1 | /pandas/_libs/json.pyi | bc4fe68573b94290cfba2e66950c1b6d45ccf0dc | [
"BSD-3-Clause"
] | permissive | pandas-dev/pandas | e7e639454a298bebc272622e66faa9829ea393bb | c7325d7e7e77ecb4a4e57b48bc25265277c75712 | refs/heads/main | 2023-09-01T12:42:07.927176 | 2023-09-01T11:14:10 | 2023-09-01T11:14:10 | 858,127 | 36,166 | 18,728 | BSD-3-Clause | 2023-09-14T21:18:41 | 2010-08-24T01:37:33 | Python | UTF-8 | Python | false | false | 496 | pyi | from typing import (
Any,
Callable,
)
# Type stub only: serializes *obj* to a JSON string. Presumably backed by a
# C-implemented ujson binding -- the implementation is not visible here.
def ujson_dumps(
    obj: Any,
    ensure_ascii: bool = ...,
    double_precision: int = ...,
    indent: int = ...,
    orient: str = ...,
    date_unit: str = ...,
    iso_dates: bool = ...,
    default_handler: None
    | Callable[[Any], str | float | bool | list | dict | None] = ...,
) -> str: ...
# Type stub only: parses a JSON string and returns the decoded object.
# Presumably the counterpart C-implemented ujson decoder.
def ujson_loads(
    s: str,
    precise_float: bool = ...,
    numpy: bool = ...,
    dtype: None = ...,
    labelled: bool = ...,
) -> Any: ...
| [
"noreply@github.com"
] | pandas-dev.noreply@github.com |
4777ab6677fa53f20a528eeea798e0d34d0e52ce | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /Jm4eKTENReSiQFw9t_2.py | 52ec433dfcf75a72f84bb522dfbafc075eb7e429 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | """
Create a function that takes a list of numbers `lst` and returns an **inverted
list**.
### Examples
invert_list([1, 2, 3, 4, 5]) ➞ [-1, -2, -3, -4, -5]
invert_list([1, -2, 3, -4, 5]) ➞ [-1, 2, -3, 4, -5]
invert_list([]) ➞ []
### Notes
* Don't forget to return the result.
* If you get stuck on a challenge, find help in the **Resources** tab.
* If you're _really_ stuck, unlock solutions in the **Solutions** tab.
"""
def invert_list(lst):
    """Return a new list with the sign of every number in *lst* flipped.

    Examples: [1, 2, 3] -> [-1, -2, -3]; [1, -2] -> [-1, 2]; [] -> [].

    PEP 8 (E731) discourages binding a lambda to a name, so this is a plain
    ``def``; the parameter is named ``lst`` to match the docstring above.
    """
    return [-x for x in lst]
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
87e445fdfb8e230b20e39d5730c4626dd30bbd3b | b2d06ad8145cbfe92835d62899f004dc207ad1b5 | /docs/releasenotes/_ext/extralinks.py | 0c1c134bcccd87a16677ddb27e7235acf1f70e69 | [
"MIT"
] | permissive | reviewboard/ReviewBot | 8027a9eb308b8c01f6d47e0372f543beff655014 | b59b566e127b5ef1b08f3189f1aa0194b7437d94 | refs/heads/master | 2023-06-10T00:25:11.506154 | 2023-05-31T22:36:34 | 2023-05-31T22:36:34 | 3,355,797 | 110 | 26 | MIT | 2020-11-05T08:56:37 | 2012-02-04T22:20:19 | Python | UTF-8 | Python | false | false | 1,536 | py | """Sphinx plugins for special links in the Release Notes."""
from __future__ import unicode_literals
from docutils import nodes, utils
def setup(app):
    """Sphinx extension entry point: register the config value and roles.

    ``bugtracker_url`` is a '%s'-style URL template consumed by bug_role;
    the third argument (True) makes changing it trigger a full rebuild.
    """
    app.add_config_value('bugtracker_url', '', True)
    app.add_role('bug', bug_role)
    app.add_role('cve', cve_role)
def bug_role(role, rawtext, text, linenum, inliner, options={}, content=[]):
    """Render a ``:bug:`` role as a link into the configured bug tracker.

    Standard docutils role signature and return convention:
    ``([nodes], [system_messages])``. The mutable defaults match the
    docutils role API and are never mutated here.
    """
    try:
        bugnum = int(text)
        if bugnum <= 0:
            raise ValueError
    except ValueError:
        # Non-numeric or non-positive bug id: emit an inline error marker.
        msg = inliner.reporter.error(
            'Bug number must be a number greater than or equal to 1; '
            '"%s" is invalid.' % text,
            line=linenum)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]
    bugtracker_url = inliner.document.settings.env.config.bugtracker_url
    # The configured URL must contain a '%s' placeholder for the bug number.
    if not bugtracker_url or '%s' not in bugtracker_url:
        msg = inliner.reporter.error('bugtracker_url must be configured.',
                                     line=linenum)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]
    ref = bugtracker_url % bugnum
    node = nodes.reference(rawtext, 'Bug #' + utils.unescape(text),
                           refuri=ref, **options)
    return [node], []
def cve_role(role, rawtext, text, linenum, inliner, options={}, content=[]):
    """Render a ``:cve:`` role as a link to the MITRE CVE database entry.

    Standard docutils role signature; returns ``([nodes], [system_messages])``.
    """
    target = 'http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-%s' % text
    label = 'CVE-' + utils.unescape(text)
    link = nodes.reference(rawtext, label, refuri=target, **options)
    return [link], []
| [
"trowbrds@gmail.com"
] | trowbrds@gmail.com |
98f69779912e67333a6086d5c774c4ed63cce04b | ccbfc7818c0b75929a1dfae41dc061d5e0b78519 | /aliyun-openapi-python-sdk-master/aliyun-python-sdk-emr/aliyunsdkemr/request/v20160408/DeleteAlertContactsRequest.py | a65c6e6aa16be5feaaa822fedfab8b6ae1127736 | [
"Apache-2.0"
] | permissive | P79N6A/dysms_python | 44b634ffb2856b81d5f79f65889bfd5232a9b546 | f44877b35817e103eed469a637813efffa1be3e4 | refs/heads/master | 2020-04-28T15:25:00.368913 | 2019-03-13T07:52:34 | 2019-03-13T07:52:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DeleteAlertContactsRequest(RpcRequest):
    """RPC request for the EMR ``DeleteAlertContacts`` API (version 2016-04-08).

    Thin generated-style wrapper: each accessor reads or writes a query
    parameter on the underlying RpcRequest.
    """
    def __init__(self):
        RpcRequest.__init__(self, 'Emr', '2016-04-08', 'DeleteAlertContacts')
    def get_ResourceOwnerId(self):
        """Return the ResourceOwnerId query parameter (None if unset)."""
        return self.get_query_params().get('ResourceOwnerId')
    def set_ResourceOwnerId(self,ResourceOwnerId):
        """Set the ResourceOwnerId query parameter."""
        self.add_query_param('ResourceOwnerId',ResourceOwnerId)
    def get_Ids(self):
        """Return the Ids query parameter (None if unset)."""
        return self.get_query_params().get('Ids')
    def set_Ids(self,Ids):
        # Presumably the identifiers of the alert contacts to delete; the
        # expected format (single id vs. list) is not shown here.
        """Set the Ids query parameter."""
        self.add_query_param('Ids',Ids)
"1478458905@qq.com"
] | 1478458905@qq.com |
30f6dedd0154090c998e834c8d4659c17018e4ee | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_1/ME.py/A1.py | 1448c25b4be0d5ca606657bc0e3799fa716385d6 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 608 | py | ##
## PROBLEM SOLVING ALGORITHM
##
def solve(N):
    """Return the last number said before sleeping when counting by N.

    The counter goes N, 2N, 3N, ... and stops at the first multiple by
    which every decimal digit 0-9 has appeared; counting by 0 never shows
    any new digits, so the answer is "INSOMNIA".
    """
    if N == 0:
        return "INSOMNIA"
    seen = set()
    current = 0
    while len(seen) < 10:
        current += N
        seen.update(str(current))
    return current
##
## MAIN LOOP: read(from stdin) - solve - print(to stdout)
##
# First stdin line holds the number of test cases; each following line one N.
T = int(input())
for t in range(T):
    ## read case
    N = int(input())
    ## solve and print result
    result = solve(N)
    # Code Jam output format: "Case #<1-based index>: <answer>"
    print('Case #'+str(t+1)+': '+str(result))
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
91e4b646687b8addbe5fbc3eef8e4b959ea6a397 | 11f473b6cf8aaf33152134f1862cdf2146980f92 | /fatiando/tests/mesher/test_tesseroid_mesh.py | bb53b6d6fd7cd0675dd8e4ae02b627077f427c24 | [
"BSD-3-Clause"
] | permissive | smudog/fatiando | 5856e061c15b066359b2ac5b2178d572b0f5ed3d | c4aec7a977be231217c23f4b767fea80171e1dd3 | refs/heads/master | 2021-01-20T09:42:29.594420 | 2016-12-19T10:46:11 | 2016-12-19T10:46:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 448 | py | from ...mesher import TesseroidMesh
import numpy as np
def test_tesseroid_mesh_copy():
    """A copied TesseroidMesh must be a distinct but equivalent object."""
    orig = TesseroidMesh((0, 1, 0, 2, 3, 0), (1, 2, 2))
    cp = orig.copy()
    # The copy must be a new object, not an alias...
    assert cp is not orig
    # ...yet share the defining attributes of the mesh.
    assert orig.celltype == cp.celltype
    assert orig.bounds == cp.bounds
    # NOTE(review): if 'dump' is a method, this compares bound-method
    # objects, which is False across distinct instances -- confirm 'dump'
    # is a data attribute on TesseroidMesh.
    assert orig.dump == cp.dump
    # Properties attached after construction must survive copying too.
    orig.addprop('density', 3300 + np.zeros(orig.size))
    cp = orig.copy()
    assert np.array_equal(orig.props['density'], cp.props['density'])
| [
"leouieda@gmail.com"
] | leouieda@gmail.com |
a2011bd335f06239f1071ddd091443dcdbbc2baf | 087a0cfe910992bbf1913a1cc8ddb47eae53c81d | /browsercompat/bcauth/oauth2p/urls.py | fe6f542748de5c4cb6208b609ed0d0a39c453f85 | [] | no_license | WeilerWebServices/MDN-Web-Docs | a1e4716ce85ee6a7548819bcb19e78f6d1c14dfa | bc092964153b03381aaff74a4d80f43a2b2dec19 | refs/heads/master | 2023-01-29T17:47:06.730214 | 2020-12-09T05:53:29 | 2020-12-09T05:53:29 | 259,744,472 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,307 | py | """
Overrides for oauth2_provider.urls.
Same as default, but uses our AuthorizationView.
"""
from __future__ import absolute_import
from django.conf.urls import url
from oauth2_provider import views
from .views import MyAuthorizationView
# OAuth2 endpoints -- identical to oauth2_provider.urls except that the
# authorize view is replaced with MyAuthorizationView (see module docstring).
urlpatterns = (
    url(r'^authorize/$', MyAuthorizationView.as_view(), name='authorize'),
    url(r'^token/$', views.TokenView.as_view(), name='token'),
    url(r'^revoke_token/$', views.RevokeTokenView.as_view(),
        name='revoke-token'),
)
# Application management views
urlpatterns += (
    url(r'^applications/$', views.ApplicationList.as_view(), name='list'),
    url(r'^applications/register/$', views.ApplicationRegistration.as_view(),
        name='register'),
    url(r'^applications/(?P<pk>\d+)/$', views.ApplicationDetail.as_view(),
        name='detail'),
    url(r'^applications/(?P<pk>\d+)/delete/$',
        views.ApplicationDelete.as_view(), name='delete'),
    url(r'^applications/(?P<pk>\d+)/update/$',
        views.ApplicationUpdate.as_view(), name='update'),
)
# Views for listing and revoking a user's own authorized tokens.
urlpatterns += (
    url(r'^authorized_tokens/$', views.AuthorizedTokensListView.as_view(),
        name='authorized-token-list'),
    url(r'^authorized_tokens/(?P<pk>\d+)/delete/$',
        views.AuthorizedTokenDeleteView.as_view(),
        name='authorized-token-delete'),
)
| [
"nateweiler84@gmail.com"
] | nateweiler84@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.