Dataset schema (29 columns; ⌀ marks nullable fields):
hexsha: string (40 chars) | size: int64 (3 to 1.03M) | ext: string (10 classes) | lang: string (1 class)
max_stars_repo_path: string (3 to 972 chars) | max_stars_repo_name: string (6 to 130 chars) | max_stars_repo_head_hexsha: string (40 to 78 chars) | max_stars_repo_licenses: list (1 to 10 items) | max_stars_count: int64 (1 to 191k) ⌀ | max_stars_repo_stars_event_min_datetime: string (24 chars) ⌀ | max_stars_repo_stars_event_max_datetime: string (24 chars) ⌀
max_issues_repo_path: string (3 to 972 chars) | max_issues_repo_name: string (6 to 130 chars) | max_issues_repo_head_hexsha: string (40 to 78 chars) | max_issues_repo_licenses: list (1 to 10 items) | max_issues_count: int64 (1 to 116k) ⌀ | max_issues_repo_issues_event_min_datetime: string (24 chars) ⌀ | max_issues_repo_issues_event_max_datetime: string (24 chars) ⌀
max_forks_repo_path: string (3 to 972 chars) | max_forks_repo_name: string (6 to 130 chars) | max_forks_repo_head_hexsha: string (40 to 78 chars) | max_forks_repo_licenses: list (1 to 10 items) | max_forks_count: int64 (1 to 105k) ⌀ | max_forks_repo_forks_event_min_datetime: string (24 chars) ⌀ | max_forks_repo_forks_event_max_datetime: string (24 chars) ⌀
content: string (3 to 1.03M chars) | avg_line_length: float64 (1.13 to 941k) | max_line_length: int64 (2 to 941k) | alphanum_fraction: float64 (0 to 1)
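For orientation, a minimal sketch of how rows with this schema could be loaded and filtered; the file name the-stack-python.parquet, and the assumption that the rows ship as a single Parquet file readable by pandas, are illustrative only.

import pandas as pd

# Hypothetical file name; the actual dataset may be sharded or stored in another format.
df = pd.read_parquet("the-stack-python.parquet")

# Keep small, starred Python files and peek at one source file's contents.
small = df[(df["lang"] == "Python") & (df["size"] < 10_000) & (df["max_stars_count"].notna())]
print(small[["max_stars_repo_name", "max_stars_repo_path", "max_stars_count"]].head())
print(small.iloc[0]["content"][:400])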
hexsha: a051f456cf786012e54e9ca804359b91829fb53e | size: 895 | ext: py | lang: Python
max_stars: count 31 (2018-11-21T01:49:06.000Z to 2022-03-30T03:41:43.000Z) | repo balos1/PelePhysics @ d01190cc7b0eaad4ec96fac573034ccb485f0e9f | licenses ["BSD-3-Clause-LBNL"] | path Support/Fuego/Pythia/pythia-0.5/packages/fuego/fuego/serialization/chemkin/unpickle/mechanisms/ElementDb.py
max_issues: count 123 (2019-03-12T22:27:29.000Z to 2022-03-29T17:00:04.000Z) | repo balos1/PelePhysics @ d01190cc7b0eaad4ec96fac573034ccb485f0e9f | licenses ["BSD-3-Clause-LBNL"] | path Support/Fuego/Pythia/pythia-0.5/packages/fuego/fuego/serialization/chemkin/unpickle/mechanisms/ElementDb.py
max_forks: count 32 (2018-11-05T11:51:59.000Z to 2022-03-29T13:09:32.000Z) | repo sundials-codes/PelePhysics @ 5624f83a04f43aa95288be9d8a7bb372a4adefe6 | licenses ["BSD-3-Clause-LBNL"] | path Support/Fuego/Pythia/pythia-0.5/packages/fuego/fuego/serialization/chemkin/unpickle/mechanisms/ElementDb.py
content:
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2003 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from builtins import object
class ElementDb(object):
def element(self, element):
self._elements.append(element)
self._index[element.symbol] = element
return
def size(self):
return len(self._elements)
def find(self, symbol=None):
if symbol:
return self._index.get(symbol)
return self._elements
def __init__(self):
self._index = {}
self._elements = []
return
# version
__id__ = "$Id$"
# End of file
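A short usage sketch of the ElementDb class above; the namedtuple is a hypothetical stand-in for the element records that the surrounding Fuego mechanism parser normally supplies.

from collections import namedtuple

# Hypothetical stand-in for the element objects that Fuego provides.
Element = namedtuple("Element", ["symbol", "weight"])

db = ElementDb()
db.element(Element("H", 1.008))
db.element(Element("O", 15.999))

print(db.size())      # 2
print(db.find("O"))   # Element(symbol='O', weight=15.999)
print(db.find())      # the full list, in insertion order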
avg_line_length: 20.813953 | max_line_length: 82 | alphanum_fraction: 0.446927

hexsha: cc4400f9a9855de938c22690f8c4fc18049fc15d | size: 1,423 | ext: py | lang: Python
max_stars: count 1 (2021-11-20T02:46:02.000Z to 2021-11-20T02:46:02.000Z) | repo anselmomendes/Poker-Hand-Value-Ratings @ 5fba451bddff83b7dec7d5211d5cd29ab66a5bb2 | licenses ["MIT"] | path settings/common.py
max_issues: count null (no event dates) | repo anselmomendes/Poker-Hand-Value-Ratings @ 5fba451bddff83b7dec7d5211d5cd29ab66a5bb2 | licenses ["MIT"] | path settings/common.py
max_forks: count null (no event dates) | repo anselmomendes/Poker-Hand-Value-Ratings @ 5fba451bddff83b7dec7d5211d5cd29ab66a5bb2 | licenses ["MIT"] | path settings/common.py
content:
# Support variables for suit checking and card scoring
class Common():
    ROYAL_FLUSH_CARD: dict = {"T": "10", "J": "11", "Q": "12", "K": "13", "A": "14"}
    VALUE_CARD: dict = {"2": "2", "3": "3", "4": "4", "5": "5", "6": "6", "7": "7",
                        "8": "8", "9": "9", "T": "10", "J": "11", "Q": "12", "K": "13", "A": "14"}
    VALUE_CARD_INV: dict = {"2": "2", "3": "3", "4": "4", "5": "5", "6": "6", "7": "7",
                            "8": "8", "9": "9", "10": "T", "11": "J", "12": "Q", "13": "K", "14": "A"}
    SUIT_CARD: dict = {"S": "0", "H": "1", "D": "2", "C": "3"}
avg_line_length: 33.880952 | max_line_length: 54 | alphanum_fraction: 0.141954

hexsha: 8c01a9f0e8c45460593b96b54b9c18686d8c1125 | size: 250 | ext: py | lang: Python
max_stars: count 1 (2018-11-12T15:18:55.000Z to 2018-11-12T15:18:55.000Z) | repo knuu/competitive-programming @ 16bc68fdaedd6f96ae24310d697585ca8836ab6e | licenses ["MIT"] | path atcoder/other/idn_fb_b.py
max_issues: count null (no event dates) | repo knuu/competitive-programming @ 16bc68fdaedd6f96ae24310d697585ca8836ab6e | licenses ["MIT"] | path atcoder/other/idn_fb_b.py
max_forks: count null (no event dates) | repo knuu/competitive-programming @ 16bc68fdaedd6f96ae24310d697585ca8836ab6e | licenses ["MIT"] | path atcoder/other/idn_fb_b.py
content:
n = int(input())
w = [list(map(lambda x: int(x)-1, input().split())) for _ in range(n)]
syussya = [0] * (n*2)
for s, _ in w:
syussya[s] += 1
for i in range(1, n*2):
syussya[i] += syussya[i-1]
for s, t in w:
print(syussya[t] - syussya[s])
avg_line_length: 25 | max_line_length: 70 | alphanum_fraction: 0.552

hexsha: da53c5f4dff5d08ad8f4026df6dbe008bb7701c6 | size: 445 | ext: py | lang: Python
max_stars: count null (no event dates) | repo themightychris/prevention-point @ a92f98b25d32dd30bb33e7cb1ac7f10439f5203f | licenses ["MIT"] | path core/program_service_map/serializer.py
max_issues: count null (no event dates) | repo themightychris/prevention-point @ a92f98b25d32dd30bb33e7cb1ac7f10439f5203f | licenses ["MIT"] | path core/program_service_map/serializer.py
max_forks: count null (no event dates) | repo themightychris/prevention-point @ a92f98b25d32dd30bb33e7cb1ac7f10439f5203f | licenses ["MIT"] | path core/program_service_map/serializer.py
content:
from rest_framework import serializers
from core.models import ProgramServiceMap
from core.services.serializers import ServiceSerializer
from core.programs.serializer import ProgramSerializer
class ProgramServiceMapSerializer(serializers.ModelSerializer):
service = ServiceSerializer(read_only=True)
program = ProgramSerializer(read_only=True)
class Meta:
model = ProgramServiceMap
fields = ("program", "service")
avg_line_length: 31.785714 | max_line_length: 63 | alphanum_fraction: 0.795506

hexsha: 39080ca089bc4cd2ab417c9a66447a6ef0471d77 | size: 2,725 | ext: py | lang: Python
max_stars: count 14 (2018-04-19T09:53:56.000Z to 2022-01-27T06:05:48.000Z) | repo Tanc009/jdcloud-sdk-python @ 8b045c99bc5b73ca7348e950b6f01e03a27982f5 | licenses ["Apache-2.0"] | path jdcloud_sdk/services/cdn/apis/GetDomainListByFilterRequest.py
max_issues: count 15 (2018-09-11T05:39:54.000Z to 2021-07-02T12:38:02.000Z) | repo Tanc009/jdcloud-sdk-python @ 8b045c99bc5b73ca7348e950b6f01e03a27982f5 | licenses ["Apache-2.0"] | path jdcloud_sdk/services/cdn/apis/GetDomainListByFilterRequest.py
max_forks: count 33 (2018-04-20T05:29:16.000Z to 2022-02-17T09:10:05.000Z) | repo Tanc009/jdcloud-sdk-python @ 8b045c99bc5b73ca7348e950b6f01e03a27982f5 | licenses ["Apache-2.0"] | path jdcloud_sdk/services/cdn/apis/GetDomainListByFilterRequest.py
content:
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class GetDomainListByFilterRequest(JDCloudRequest):
"""
通过标签查询加速域名接口
"""
def __init__(self, parameters, header=None, version="v1"):
super(GetDomainListByFilterRequest, self).__init__(
'/domain:query', 'POST', header, version)
self.parameters = parameters
class GetDomainListByFilterParameters(object):
def __init__(self, ):
"""
"""
self.keyWord = None
self.pageNumber = None
self.pageSize = None
self.status = None
self.type = None
self.accelerateRegion = None
self.filterBy = None
self.tagFilters = None
def setKeyWord(self, keyWord):
"""
:param keyWord: (Optional) 根据关键字进行模糊匹配,域名或者回源信息
"""
self.keyWord = keyWord
def setPageNumber(self, pageNumber):
"""
:param pageNumber: (Optional) pageNumber,默认值为1
"""
self.pageNumber = pageNumber
def setPageSize(self, pageSize):
"""
:param pageSize: (Optional) pageSize,默认值为20,最大值为50
"""
self.pageSize = pageSize
def setStatus(self, status):
"""
:param status: (Optional) 根据域名状态查询, 可选值[offline, online, configuring, auditing, audit_reject]
"""
self.status = status
def setType(self, type):
"""
:param type: (Optional) 域名类型,(web:静态小文件,download:大文件加速,vod:视频加速,live:直播加速),不传查所有
"""
self.type = type
def setAccelerateRegion(self, accelerateRegion):
"""
:param accelerateRegion: (Optional) 加速区域,(mainLand:中国大陆,nonMainLand:海外加港澳台,all:全球),不传为全球
"""
self.accelerateRegion = accelerateRegion
def setFilterBy(self, filterBy):
"""
:param filterBy: (Optional) 筛选依据(0:根据域名筛选,1:根据回源信息筛选),默认按照域名进行筛选
"""
self.filterBy = filterBy
def setTagFilters(self, tagFilters):
"""
:param tagFilters: (Optional) 标签过滤条件
"""
self.tagFilters = tagFilters
avg_line_length: 28.385417 | max_line_length: 101 | alphanum_fraction: 0.640734

hexsha: 841f5a308ac9628159314d202d92fddea08d9d01 | size: 20,868 | ext: py | lang: Python
max_stars: count 3 (2020-06-23T02:25:27.000Z to 2021-09-07T18:48:11.000Z) | repo JianpingChen/azure-sdk-for-python @ 3072fc8c0366287fbaea1b02493a50259c3248a2 | licenses ["MIT"] | path sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_product_policy_operations.py
max_issues: count 510 (2019-07-17T16:11:19.000Z to 2021-08-02T08:38:32.000Z) | repo JianpingChen/azure-sdk-for-python @ 3072fc8c0366287fbaea1b02493a50259c3248a2 | licenses ["MIT"] | path sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_product_policy_operations.py
max_forks: count 5 (2019-09-04T12:51:37.000Z to 2020-09-16T07:28:40.000Z) | repo JianpingChen/azure-sdk-for-python @ 3072fc8c0366287fbaea1b02493a50259c3248a2 | licenses ["MIT"] | path sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_product_policy_operations.py
content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ProductPolicyOperations:
"""ProductPolicyOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.apimanagement.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def list_by_product(
self,
resource_group_name: str,
service_name: str,
product_id: str,
**kwargs
) -> "_models.PolicyCollection":
"""Get the policy configuration at the Product level.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param product_id: Product identifier. Must be unique in the current API Management service
instance.
:type product_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyCollection, or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.PolicyCollection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.list_by_product.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'productId': self._serialize.url("product_id", product_id, 'str', max_length=256, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('PolicyCollection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_by_product.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies'} # type: ignore
async def get_entity_tag(
self,
resource_group_name: str,
service_name: str,
product_id: str,
policy_id: Union[str, "_models.PolicyIdName"],
**kwargs
) -> bool:
"""Get the ETag of the policy configuration at the Product level.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param product_id: Product identifier. Must be unique in the current API Management service
instance.
:type product_id: str
:param policy_id: The identifier of the Policy.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool, or the result of cls(response)
:rtype: bool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.get_entity_tag.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'productId': self._serialize.url("product_id", product_id, 'str', max_length=256, min_length=1),
'policyId': self._serialize.url("policy_id", policy_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.head(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
if cls:
return cls(pipeline_response, None, response_headers)
return 200 <= response.status_code <= 299
get_entity_tag.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'} # type: ignore
async def get(
self,
resource_group_name: str,
service_name: str,
product_id: str,
policy_id: Union[str, "_models.PolicyIdName"],
format: Optional[Union[str, "_models.PolicyExportFormat"]] = "xml",
**kwargs
) -> "_models.PolicyContract":
"""Get the policy configuration at the Product level.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param product_id: Product identifier. Must be unique in the current API Management service
instance.
:type product_id: str
:param policy_id: The identifier of the Policy.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param format: Policy Export Format.
:type format: str or ~azure.mgmt.apimanagement.models.PolicyExportFormat
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyContract, or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.PolicyContract
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyContract"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'productId': self._serialize.url("product_id", product_id, 'str', max_length=256, min_length=1),
'policyId': self._serialize.url("policy_id", policy_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if format is not None:
query_parameters['format'] = self._serialize.query("format", format, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('PolicyContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'} # type: ignore
async def create_or_update(
self,
resource_group_name: str,
service_name: str,
product_id: str,
policy_id: Union[str, "_models.PolicyIdName"],
parameters: "_models.PolicyContract",
if_match: Optional[str] = None,
**kwargs
) -> "_models.PolicyContract":
"""Creates or updates policy configuration for the Product.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param product_id: Product identifier. Must be unique in the current API Management service
instance.
:type product_id: str
:param policy_id: The identifier of the Policy.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param parameters: The policy contents to apply.
:type parameters: ~azure.mgmt.apimanagement.models.PolicyContract
:param if_match: ETag of the Entity. Not required when creating an entity, but required when
updating an entity.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PolicyContract, or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.PolicyContract
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyContract"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'productId': self._serialize.url("product_id", product_id, 'str', max_length=256, min_length=1),
'policyId': self._serialize.url("policy_id", policy_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'PolicyContract')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
if response.status_code == 200:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('PolicyContract', pipeline_response)
if response.status_code == 201:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('PolicyContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'} # type: ignore
async def delete(
self,
resource_group_name: str,
service_name: str,
product_id: str,
policy_id: Union[str, "_models.PolicyIdName"],
if_match: str,
**kwargs
) -> None:
"""Deletes the policy configuration at the Product.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param product_id: Product identifier. Must be unique in the current API Management service
instance.
:type product_id: str
:param policy_id: The identifier of the Policy.
:type policy_id: str or ~azure.mgmt.apimanagement.models.PolicyIdName
:param if_match: ETag of the Entity. ETag should match the current entity state from the header
response of the GET request or it should be * for unconditional update.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'productId': self._serialize.url("product_id", product_id, 'str', max_length=256, min_length=1),
'policyId': self._serialize.url("policy_id", policy_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/products/{productId}/policies/{policyId}'} # type: ignore
avg_line_length: 51.022005 | max_line_length: 222 | alphanum_fraction: 0.671363

hexsha: eec64a7ba9ea76a0a4a97cdb807d46ca343b6b73 | size: 6,611 | ext: py | lang: Python
max_stars: count 389 (2015-10-16T13:26:33.000Z to 2021-08-15T04:26:32.000Z) | repo aquamatthias/python-arango @ 9212461e2349f692312ee4e74756e5bbea2bc582 | licenses ["MIT"] | path arango/client.py
max_issues: count 165 (2015-10-12T17:28:33.000Z to 2021-07-26T16:46:49.000Z) | repo ITIVA-HUB/python-arango @ ff990fde4a4403da170d8759adb46a7100e403a6 | licenses ["MIT"] | path arango/client.py
max_forks: count 72 (2015-12-07T17:29:19.000Z to 2021-08-13T14:30:07.000Z) | repo ITIVA-HUB/python-arango @ ff990fde4a4403da170d8759adb46a7100e403a6 | licenses ["MIT"] | path arango/client.py
content:
__all__ = ["ArangoClient"]
from json import dumps, loads
from typing import Any, Callable, Optional, Sequence, Union
from pkg_resources import get_distribution
from arango.connection import (
BasicConnection,
Connection,
JwtConnection,
JwtSuperuserConnection,
)
from arango.database import StandardDatabase
from arango.exceptions import ServerConnectionError
from arango.http import DefaultHTTPClient, HTTPClient
from arango.resolver import (
HostResolver,
RandomHostResolver,
RoundRobinHostResolver,
SingleHostResolver,
)
class ArangoClient:
"""ArangoDB client.
:param hosts: Host URL or list of URLs (coordinators in a cluster).
:type hosts: str | [str]
:param host_resolver: Host resolver. This parameter used for clusters (when
multiple host URLs are provided). Accepted values are "roundrobin" and
"random". Any other value defaults to round robin.
:type host_resolver: str
:param http_client: User-defined HTTP client.
:type http_client: arango.http.HTTPClient
:param serializer: User-defined JSON serializer. Must be a callable
which takes a JSON data type object as its only argument and return
the serialized string. If not given, ``json.dumps`` is used by default.
:type serializer: callable
:param deserializer: User-defined JSON de-serializer. Must be a callable
which takes a JSON serialized string as its only argument and return
the de-serialized object. If not given, ``json.loads`` is used by
default.
:type deserializer: callable
"""
def __init__(
self,
hosts: Union[str, Sequence[str]] = "http://127.0.0.1:8529",
host_resolver: str = "roundrobin",
http_client: Optional[HTTPClient] = None,
serializer: Callable[..., str] = lambda x: dumps(x),
deserializer: Callable[[str], Any] = lambda x: loads(x),
) -> None:
if isinstance(hosts, str):
self._hosts = [host.strip("/") for host in hosts.split(",")]
else:
self._hosts = [host.strip("/") for host in hosts]
host_count = len(self._hosts)
self._host_resolver: HostResolver
if host_count == 1:
self._host_resolver = SingleHostResolver()
elif host_resolver == "random":
self._host_resolver = RandomHostResolver(host_count)
else:
self._host_resolver = RoundRobinHostResolver(host_count)
self._http = http_client or DefaultHTTPClient()
self._serializer = serializer
self._deserializer = deserializer
self._sessions = [self._http.create_session(h) for h in self._hosts]
def __repr__(self) -> str:
return f"<ArangoClient {','.join(self._hosts)}>"
def close(self) -> None: # pragma: no cover
"""Close HTTP sessions."""
for session in self._sessions:
session.close()
@property
def hosts(self) -> Sequence[str]:
"""Return the list of ArangoDB host URLs.
:return: List of ArangoDB host URLs.
:rtype: [str]
"""
return self._hosts
@property
def version(self) -> str:
"""Return the client version.
:return: Client version.
:rtype: str
"""
return get_distribution("python-arango").version
def db(
self,
name: str = "_system",
username: str = "root",
password: str = "",
verify: bool = False,
auth_method: str = "basic",
superuser_token: Optional[str] = None,
) -> StandardDatabase:
"""Connect to an ArangoDB database and return the database API wrapper.
:param name: Database name.
:type name: str
:param username: Username for basic authentication.
:type username: str
:param password: Password for basic authentication.
:type password: str
:param verify: Verify the connection by sending a test request.
:type verify: bool
:param auth_method: HTTP authentication method. Accepted values are
"basic" (default) and "jwt". If set to "jwt", the token is
refreshed automatically using ArangoDB username and password. This
assumes that the clocks of the server and client are synchronized.
:type auth_method: str
:param superuser_token: User generated token for superuser access.
If set, parameters **username**, **password** and **auth_method**
are ignored. This token is not refreshed automatically.
:type superuser_token: str
:return: Standard database API wrapper.
:rtype: arango.database.StandardDatabase
:raise arango.exceptions.ServerConnectionError: If **verify** was set
to True and the connection fails.
"""
connection: Connection
if superuser_token is not None:
connection = JwtSuperuserConnection(
hosts=self._hosts,
host_resolver=self._host_resolver,
sessions=self._sessions,
db_name=name,
http_client=self._http,
serializer=self._serializer,
deserializer=self._deserializer,
superuser_token=superuser_token,
)
elif auth_method.lower() == "basic":
connection = BasicConnection(
hosts=self._hosts,
host_resolver=self._host_resolver,
sessions=self._sessions,
db_name=name,
username=username,
password=password,
http_client=self._http,
serializer=self._serializer,
deserializer=self._deserializer,
)
elif auth_method.lower() == "jwt":
connection = JwtConnection(
hosts=self._hosts,
host_resolver=self._host_resolver,
sessions=self._sessions,
db_name=name,
username=username,
password=password,
http_client=self._http,
serializer=self._serializer,
deserializer=self._deserializer,
)
else:
raise ValueError(f"invalid auth_method: {auth_method}")
if verify:
try:
connection.ping()
except ServerConnectionError as err:
raise err
except Exception as err:
raise ServerConnectionError(f"bad connection: {err}")
return StandardDatabase(connection)
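A brief usage sketch of the client defined above; the host URL and credentials are placeholders.

client = ArangoClient(hosts="http://127.0.0.1:8529")

# Basic authentication against the _system database; verify=True sends a ping
# and raises ServerConnectionError if the server cannot be reached.
sys_db = client.db("_system", username="root", password="passwd", verify=True)

# JWT authentication uses the same credentials; the token is refreshed automatically.
jwt_db = client.db("_system", username="root", password="passwd", auth_method="jwt")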
avg_line_length: 36.125683 | max_line_length: 79 | alphanum_fraction: 0.615036

hexsha: 67fe39592b9448c68f8f6b5d0bc3c38475e1e271 | size: 968 | ext: py | lang: Python
max_stars: count 9 (2020-02-14T15:49:57.000Z to 2021-03-02T20:17:41.000Z) | repo palwolus/Cyder @ c6b123f3fd142b98017cf01ffb5efa28c2a075be | licenses ["MIT"] | path vfssh/vfs/fs_object.py
max_issues: count null (no event dates) | repo palwolus/Cyder @ c6b123f3fd142b98017cf01ffb5efa28c2a075be | licenses ["MIT"] | path vfssh/vfs/fs_object.py
max_forks: count 2 (2020-02-13T18:55:07.000Z to 2020-10-03T02:31:45.000Z) | repo palwolus/Cyder @ c6b123f3fd142b98017cf01ffb5efa28c2a075be | licenses ["MIT"] | path vfssh/vfs/fs_object.py
content:
class FileObject:
def __init__(self, name, attributes, location, data=None, memory=None, size=None, timestamp=None, owner=None):
self.name = name
self.attributes = attributes
self.loc = location
# TODO: Find size of obj/file
        self.size = size
self.memory = memory
self.timestamp = timestamp
self.owner = owner
self.group = owner
if memory is True:
with open(data, 'r') as f:
self._data = f.read()
else:
self._data = data
def to_list(self):
return [self.name, self.attributes, self._data]
def to_dict(self):
return {'name': self.name, 'attribute': self.attributes, 'type': __class__, 'data': self._data}
@property
def data(self):
if self.memory is False:
with open(self._data, 'r') as f:
return f.read()
else:
return self._data
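A small usage sketch of FileObject; the path /tmp/motd.txt is a placeholder. With memory=True the file contents are read once at construction time, otherwise only the path is stored and the data property re-reads the file on each access.

eager = FileObject("motd.txt", "-rw-r--r--", "/", data="/tmp/motd.txt", memory=True)
lazy = FileObject("motd.txt", "-rw-r--r--", "/", data="/tmp/motd.txt", memory=False)

print(eager.to_dict()["name"])  # "motd.txt"
print(lazy.data[:20])           # first 20 characters, read from disk on demand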
avg_line_length: 31.225806 | max_line_length: 114 | alphanum_fraction: 0.56405

hexsha: fe3185555682fed99102d8c0fa0d7b5d4470187c | size: 2,396 | ext: py | lang: Python
max_stars: count 3 (2016-04-21T07:05:45.000Z to 2020-08-05T08:37:37.000Z) | repo tguillemLSST/eotest @ c6f150984fa5dff85b9805028645bf46fc846f11 | licenses ["BSD-3-Clause-LBNL"] | path python/lsst/eotest/sensor/ctesim.py
max_issues: count 70 (2015-03-26T09:48:53.000Z to 2020-04-22T16:29:43.000Z) | repo tguillemLSST/eotest @ c6f150984fa5dff85b9805028645bf46fc846f11 | licenses ["BSD-3-Clause-LBNL"] | path python/lsst/eotest/sensor/ctesim.py
max_forks: count 5 (2017-08-15T20:52:44.000Z to 2022-03-25T12:54:07.000Z) | repo tguillemLSST/eotest @ c6f150984fa5dff85b9805028645bf46fc846f11 | licenses ["BSD-3-Clause-LBNL"] | path python/lsst/eotest/sensor/ctesim.py
content:
"""
Simulate effects of CTE using cte_matrix.
"""
from __future__ import print_function
from __future__ import absolute_import
import numpy as np
import astropy.io.fits as fits
import lsst.afw.image as afwImage
import lsst.eotest.image_utils as imutils
from .AmplifierGeometry import makeAmplifierGeometry
from .cte_matrix import cte_matrix
from . import sim_tools
_dtypes = dict([(-32, np.float32), (16, np.int16)])
def convert(imarr, bitpix):
if bitpix > 0:
my_round = np.round
else:
def my_round(x): return x
return np.array(my_round(imarr), dtype=_dtypes[bitpix])
def fitsFile(segments, input):
output = fits.HDUList()
output.append(fits.PrimaryHDU())
output[0].header = input[0].header
for amp in segments:
bitpix = input[amp].header['BITPIX']
imarr = convert(segments[amp].getArray(), bitpix)
output.append(fits.ImageHDU(data=imarr))
output[amp].header = input[amp].header
return output
def ctesim(infile, pcti=0, scti=0, verbose=False):
input = fits.open(infile)
amps = [i for i in range(1, len(input))
if input[i].name.upper().startswith('SEGMENT')]
if not isinstance(pcti, dict):
pcti = {amp: pcti for amp in amps}
if not isinstance(scti, dict):
scti = {amp: scti for amp in amps}
segments = {}
for amp in amps:
if verbose:
print("ctesim: working on amp", amp)
image = afwImage.ImageF(infile, imutils.dm_hdu(amp))
geom = makeAmplifierGeometry(infile)
#
# Temporarily remove readout bias median.
#
bias_med = imutils.median(image.Factory(image, geom.serial_overscan))
image -= bias_med
imarr = image.getArray()
outimage = afwImage.ImageF(image, True)
outarr = outimage.getArray()
if pcti[amp] != 0:
pcte_matrix = cte_matrix(imarr.shape[0], pcti[amp])
for col in range(0, imarr.shape[1]):
outarr[:, col] = np.dot(pcte_matrix, imarr[:, col])
if scti[amp] != 0:
scte_matrix = cte_matrix(imarr.shape[1], scti[amp])
for row in range(0, imarr.shape[0]):
outarr[row, :] = np.dot(scte_matrix, outarr[row, :])
#
# Restore readout bias
#
outarr += bias_med
segments[amp] = outimage
return fitsFile(segments, input)
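A hedged example of calling ctesim above; the input file name is hypothetical, and the call only works in an environment where the LSST eotest stack (lsst.afw, lsst.eotest) is importable.

# Apply a parallel and serial CTI of 1e-6 to every amplifier and save the result.
hdus = ctesim("flat_example.fits", pcti=1e-6, scti=1e-6, verbose=True)
hdus.writeto("flat_example_cte.fits", overwrite=True)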
avg_line_length: 31.116883 | max_line_length: 77 | alphanum_fraction: 0.623957

hexsha: 2b6c73a38bbdecdb99e71139b76acb65ba04b29d | size: 567 | ext: py | lang: Python
max_stars: count 46 (2017-05-23T18:09:05.000Z to 2022-02-15T15:09:34.000Z) | repo hchoi36/CloudGuard-Repos- @ 88ef000b2bca438e60612a64cd2434b9985ad4c1 | licenses ["MIT"] | path notification-endpoint/python/resource/__init__.py
max_issues: count 28 (2017-06-09T06:59:21.000Z to 2020-04-22T17:38:41.000Z) | repo hchoi36/CloudGuard-Repos- @ 88ef000b2bca438e60612a64cd2434b9985ad4c1 | licenses ["MIT"] | path notification-endpoint/python/resource/__init__.py
max_forks: count 70 (2017-05-15T14:09:56.000Z to 2022-03-24T11:55:33.000Z) | repo hchoi36/CloudGuard-Repos- @ 88ef000b2bca438e60612a64cd2434b9985ad4c1 | licenses ["MIT"] | path notification-endpoint/python/resource/__init__.py
content:
import logging
import json
import azure.functions as func
def main(req: func.HttpRequest) -> func.HttpResponse:
logging.info('Python HTTP trigger function processed a request.')
try:
req_body = req.get_json()
request_as_text = json.dumps(req_body, default=lambda o: o.__dict__)
logging.info(request_as_text)
    except Exception as e:
logging.exception(e)
return func.HttpResponse(
"An error occurred.",
status_code=500
)
return func.HttpResponse(f"Success")
avg_line_length: 27 | max_line_length: 77 | alphanum_fraction: 0.638448

hexsha: e24f0fbec77b62b8419d57aba95f0cfda4144b66 | size: 16,150 | ext: py | lang: Python
max_stars: count null (no event dates) | repo wuyongwen/XX-Net @ 313aefd862b8f230f7c61dc29db1b2b93a17e6ab | licenses ["BSD-2-Clause"] | path code/default/gae_proxy/server/gae/gae.py
max_issues: count null (no event dates) | repo wuyongwen/XX-Net @ 313aefd862b8f230f7c61dc29db1b2b93a17e6ab | licenses ["BSD-2-Clause"] | path code/default/gae_proxy/server/gae/gae.py
max_forks: count null (no event dates) | repo wuyongwen/XX-Net @ 313aefd862b8f230f7c61dc29db1b2b93a17e6ab | licenses ["BSD-2-Clause"] | path code/default/gae_proxy/server/gae/gae.py
content:
#!/usr/bin/env python
# coding:utf-8
# GAE limit:
# only support http/https request, don't support tcp/udp connect for unpaid user.
# max timeout for every request is 60 seconds
# max upload data size is 30M
# max download data size is 10M
# How to Download file large then 10M?
# HTTP protocol support range fetch.
# If server return header include "accept-ranges", then client can request special range
# by put Content-Range in request header.
#
# GAE server will return 206 status code if file is too large and server support range fetch.
# Then GAE_proxy local client will switch to range fetch mode.
__version__ = '3.3.1'
__password__ = ''
__hostsdeny__ = ()
#__hostsdeny__ = ('.youtube.com', '.youku.com', ".googlevideo.com")
import os
import re
import time
import struct
import zlib
import base64
import logging
import urlparse
import httplib
import io
import string
import traceback
from google.appengine.api import urlfetch
from google.appengine.api.taskqueue.taskqueue import MAX_URL_LENGTH
from google.appengine.runtime import apiproxy_errors
URLFETCH_MAX = 2
URLFETCH_MAXSIZE = 4 * 1024 * 1024
URLFETCH_DEFLATE_MAXSIZE = 4 * 1024 * 1024
URLFETCH_TIMEOUT = 30
def message_html(title, banner, detail=''):
MESSAGE_TEMPLATE = '''
<html><head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<title>$title</title>
<style><!--
body {font-family: arial,sans-serif}
div.nav {margin-top: 1ex}
div.nav A {font-size: 10pt; font-family: arial,sans-serif}
span.nav {font-size: 10pt; font-family: arial,sans-serif; font-weight: bold}
div.nav A,span.big {font-size: 12pt; color: #0000cc}
div.nav A {font-size: 10pt; color: black}
A.l:link {color: #6f6f6f}
A.u:link {color: green}
//--></style>
</head>
<body text=#000000 bgcolor=#ffffff>
<table border=0 cellpadding=2 cellspacing=0 width=100%>
<tr><td bgcolor=#3366cc><font face=arial,sans-serif color=#ffffff><b>Message From FetchServer</b></td></tr>
<tr><td> </td></tr></table>
<blockquote>
<H1>$banner</H1>
$detail
<p>
</blockquote>
<table width=100% cellpadding=0 cellspacing=0><tr><td bgcolor=#3366cc><img alt="" width=1 height=4></td></tr></table>
</body></html>
'''
return string.Template(MESSAGE_TEMPLATE).substitute(
title=title, banner=banner, detail=detail)
try:
from Crypto.Cipher.ARC4 import new as RC4Cipher
except ImportError:
logging.warn('Load Crypto.Cipher.ARC4 Failed, Use Pure Python Instead.')
class RC4Cipher(object):
def __init__(self, key):
x = 0
box = range(256)
for i, y in enumerate(box):
x = (x + y + ord(key[i % len(key)])) & 0xff
box[i], box[x] = box[x], y
self.__box = box
self.__x = 0
self.__y = 0
def encrypt(self, data):
out = []
out_append = out.append
x = self.__x
y = self.__y
box = self.__box
for char in data:
x = (x + 1) & 0xff
y = (y + box[x]) & 0xff
box[x], box[y] = box[y], box[x]
out_append(chr(ord(char) ^ box[(box[x] + box[y]) & 0xff]))
self.__x = x
self.__y = y
return ''.join(out)
def inflate(data):
return zlib.decompress(data, -zlib.MAX_WBITS)
def deflate(data):
return zlib.compress(data)[2:-4]
def format_response(status, headers, content):
if content:
headers.pop('content-length', None)
headers['Content-Length'] = str(len(content))
data = 'HTTP/1.1 %d %s\r\n%s\r\n\r\n%s' % \
(status,
httplib.responses.get(status,'Unknown'),
'\r\n'.join('%s: %s' % (k.title(), v) for k, v in headers.items()),
content)
data = deflate(data)
return struct.pack('!h', len(data)) + data
def application(environ, start_response):
if environ['REQUEST_METHOD'] == 'GET' and 'HTTP_X_URLFETCH_PS1' not in environ:
# xxnet 自用
timestamp = long(
os.environ['CURRENT_VERSION_ID'].split('.')[1]) / 2**28
ctime = time.strftime(
'%Y-%m-%d %H:%M:%S',
time.gmtime(
timestamp + 8 * 3600))
start_response('200 OK', [('Content-Type', 'text/plain')])
yield 'GoAgent Python Server %s works, deployed at %s\n' % (__version__, ctime)
if len(__password__) > 2:
yield 'Password: %s%s%s' % (__password__[0], '*' * (len(__password__) - 2), __password__[-1])
raise StopIteration
start_response('200 OK', [('Content-Type', 'image/gif')])
if environ['REQUEST_METHOD'] == 'HEAD':
raise StopIteration
# 请求头则已经完成
options = environ.get('HTTP_X_URLFETCH_OPTIONS', '')
# 不知道怎么直接获得的
# 但一般,此段语句无用
if 'rc4' in options and not __password__:
# 如果客户端需要加密,但gae无密码
# 但rc4 如不改源码,则恒为假
yield format_response(400, {'Content-Type': 'text/html; charset=utf-8'}, message_html('400 Bad Request', 'Bad Request (options) - please set __password__ in gae.py', 'please set __password__ and upload gae.py again'))
raise StopIteration
try:
if 'HTTP_X_URLFETCH_PS1' in environ:
# 第一部分
payload = inflate(base64.b64decode(environ['HTTP_X_URLFETCH_PS1']))
body = inflate(
base64.b64decode(
# 第二部分 即原始body
environ['HTTP_X_URLFETCH_PS2'])) if 'HTTP_X_URLFETCH_PS2' in environ else ''
else:
# POST
# POST 获取数据的方式
wsgi_input = environ['wsgi.input']
input_data = wsgi_input.read(
int(environ.get('CONTENT_LENGTH', '0')))
if 'rc4' in options:
input_data = RC4Cipher(__password__).encrypt(input_data)
payload_length, = struct.unpack('!h', input_data[:2]) # 获取长度
payload = inflate(input_data[2:2 + payload_length]) # 获取负载
body = input_data[2 + payload_length:] # 获取body
raw_response_line, payload = payload.split('\r\n', 1)
method, url = raw_response_line.split()[:2]
# http content:
# 此为body
#{
# pack_req_head_len: 2 bytes,#POST 时使用
# pack_req_head : deflate{
# 此为负载
# original request line,
# original request headers,
# X-URLFETCH-kwargs HEADS, {
# password,
# maxsize, defined in config AUTO RANGE MAX SIZE
# timeout, request timeout for GAE urlfetch.
#}
#}
# body
#}
headers = {}
# 获取 原始头
for line in payload.splitlines():
key, value = line.split(':', 1)
headers[key.title()] = value.strip()
except (zlib.error, KeyError, ValueError):
import traceback
yield format_response(500, {'Content-Type': 'text/html; charset=utf-8'}, message_html('500 Internal Server Error', 'Bad Request (payload) - Possible Wrong Password', '<pre>%s</pre>' % traceback.format_exc()))
raise StopIteration
# 获取gae用的头
kwargs = {}
any(kwargs.__setitem__(x[len('x-urlfetch-'):].lower(), headers.pop(x))
for x in headers.keys() if x.lower().startswith('x-urlfetch-'))
if 'Content-Encoding' in headers and body:
# fix bug for LinkedIn android client
if headers['Content-Encoding'] == 'deflate':
try:
body2 = inflate(body)
headers['Content-Length'] = str(len(body2))
del headers['Content-Encoding']
body = body2
except BaseException:
pass
logging.info(
'%s "%s %s %s" - -',
environ['REMOTE_ADDR'],
method,
url,
'HTTP/1.1')
# 参数使用
if __password__ and __password__ != kwargs.get('password', ''):
yield format_response(403, {'Content-Type': 'text/html; charset=utf-8'}, message_html('403 Wrong password', 'Wrong password(%r)' % kwargs.get('password', ''), 'GoAgent proxy.ini password is wrong!'))
raise StopIteration
netloc = urlparse.urlparse(url).netloc
if __hostsdeny__ and netloc.endswith(__hostsdeny__):
yield format_response(403, {'Content-Type': 'text/html; charset=utf-8'}, message_html('403 Hosts Deny', 'Hosts Deny(%r)' % netloc, detail='公用appid因为资源有限,限制观看视频和文件下载等消耗资源过多的访问,请使用自己的appid <a href=" https://github.com/XX-net/XX-Net/wiki/Register-Google-appid" target="_blank">帮助</a> '))
raise StopIteration
if len(url) > MAX_URL_LENGTH:
yield format_response(400, {'Content-Type': 'text/html; charset=utf-8'}, message_html('400 Bad Request', 'length of URL too long(greater than %r)' % MAX_URL_LENGTH, detail='url=%r' % url))
raise StopIteration
if netloc.startswith(('127.0.0.', '::1', 'localhost')):
# 测试用
yield format_response(400, {'Content-Type': 'text/html; charset=utf-8'}, message_html('GoAgent %s is Running' % __version__, 'Now you can visit some websites', ''.join('<a href="https://%s/">%s</a><br/>' % (x, x) for x in ('google.com', 'mail.google.com'))))
raise StopIteration
fetchmethod = getattr(urlfetch, method, None)
if not fetchmethod:
yield format_response(405, {'Content-Type': 'text/html; charset=utf-8'}, message_html('405 Method Not Allowed', 'Method Not Allowed: %r' % method, detail='Method Not Allowed URL=%r' % url))
raise StopIteration
timeout = int(kwargs.get('timeout', URLFETCH_TIMEOUT))
validate_certificate = bool(int(kwargs.get('validate', 0)))
maxsize = int(kwargs.get('maxsize', 0))
# https://www.freebsdchina.org/forum/viewtopic.php?t=54269
accept_encoding = headers.get(
'Accept-Encoding',
'') or headers.get(
'Bccept-Encoding',
'')
errors = []
allow_truncated = False
for i in xrange(int(kwargs.get('fetchmax', URLFETCH_MAX))):
try:
response = urlfetch.fetch(
url,
body,
fetchmethod,
headers,
allow_truncated=allow_truncated,
follow_redirects=False,
deadline=timeout,
validate_certificate=validate_certificate)
# 获取真正response
break
except apiproxy_errors.OverQuotaError as e:
time.sleep(5)
except urlfetch.DeadlineExceededError as e:
errors.append('%r, timeout=%s' % (e, timeout))
logging.error(
'DeadlineExceededError(timeout=%s, url=%r)',
timeout,
url)
time.sleep(1)
# 必须truncaated
allow_truncated = True
m = re.search(r'=\s*(\d+)-', headers.get('Range')
or headers.get('range') or '')
if m is None:
headers['Range'] = 'bytes=0-%d' % (maxsize or URLFETCH_MAXSIZE)
else:
headers.pop('Range', '')
headers.pop('range', '')
start = int(m.group(1))
headers['Range'] = 'bytes=%s-%d' % (start,
start + (maxsize or URLFETCH_MAXSIZE))
timeout *= 2
except urlfetch.DownloadError as e:
errors.append('%r, timeout=%s' % (e, timeout))
logging.error('DownloadError(timeout=%s, url=%r)', timeout, url)
time.sleep(1)
timeout *= 2
except urlfetch.ResponseTooLargeError as e:
errors.append('%r, timeout=%s' % (e, timeout))
response = e.response
logging.error(
'ResponseTooLargeError(timeout=%s, url=%r) response(%r)',
timeout,
url,
response)
allow_truncated = True
m = re.search(r'=\s*(\d+)-', headers.get('Range')
or headers.get('range') or '')
if m is None:
headers['Range'] = 'bytes=0-%d' % (maxsize or URLFETCH_MAXSIZE)
else:
headers.pop('Range', '')
headers.pop('range', '')
start = int(m.group(1))
headers['Range'] = 'bytes=%s-%d' % (start,
start + (maxsize or URLFETCH_MAXSIZE))
timeout *= 2
except urlfetch.SSLCertificateError as e:
errors.append('%r, should validate=0 ?' % e)
logging.error('%r, timeout=%s', e, timeout)
except Exception as e:
errors.append(str(e))
stack_str = "stack:%s" % traceback.format_exc()
errors.append(stack_str)
if i == 0 and method == 'GET':
timeout *= 2
else:
error_string = '<br />\n'.join(errors)
if not error_string:
logurl = 'https://appengine.google.com/logs?&app_id=%s' % os.environ['APPLICATION_ID']
error_string = 'Internal Server Error. <p/>try <a href="javascript:window.location.reload(true);">refresh</a> or goto <a href="%s" target="_blank">appengine.google.com</a> for details' % logurl
yield format_response(502, {'Content-Type': 'text/html; charset=utf-8'}, message_html('502 Urlfetch Error', 'Python Urlfetch Error: %r' % method, error_string))
raise StopIteration
#logging.debug('url=%r response.status_code=%r response.headers=%r response.content[:1024]=%r', url, response.status_code, dict(response.headers), response.content[:1024])
#以上实现fetch 的细节
status_code = int(response.status_code)
data = response.content
response_headers = response.headers
response_headers['X-Head-Content-Length'] = response_headers.get(
'Content-Length', '')
# for k in response_headers:
# v = response_headers[k]
# logging.debug("Head:%s: %s", k, v)
content_type = response_headers.get('content-type', '')
# 也是分片合并之类的细节
if status_code == 200 and maxsize and len(data) > maxsize and response_headers.get(
'accept-ranges', '').lower() == 'bytes' and int(response_headers.get('content-length', 0)):
logging.debug("data len:%d max:%d", len(data), maxsize)
status_code = 206
response_headers['Content-Range'] = 'bytes 0-%d/%d' % (
maxsize - 1, len(data))
data = data[:maxsize]
if status_code == 200 and 'content-encoding' not in response_headers and 512 < len(
data) < URLFETCH_DEFLATE_MAXSIZE and content_type.startswith(('text/', 'application/json', 'application/javascript')):
if 'gzip' in accept_encoding:
response_headers['Content-Encoding'] = 'gzip'
compressobj = zlib.compressobj(
zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0)
dataio = io.BytesIO()
dataio.write('\x1f\x8b\x08\x00\x00\x00\x00\x00\x02\xff')
dataio.write(compressobj.compress(data))
dataio.write(compressobj.flush())
dataio.write(
struct.pack(
'<LL',
zlib.crc32(data) & 0xFFFFFFFF,
len(data) & 0xFFFFFFFF))
data = dataio.getvalue()
elif 'deflate' in accept_encoding:
response_headers['Content-Encoding'] = 'deflate'
data = deflate(data)
response_headers['Content-Length'] = str(len(data))
if 'rc4' not in options:
yield format_response(status_code, response_headers, '')
yield data
else:
cipher = RC4Cipher(__password__)
yield cipher.encrypt(format_response(status_code, response_headers, ''))
yield cipher.encrypt(data)
avg_line_length: 39.975248 | max_line_length: 293 | alphanum_fraction: 0.569164

hexsha: e9d09cab60b868395ddcd86d8b42900dad6b4180 | size: 387 | ext: py | lang: Python
max_stars: count 1 (2021-03-15T16:11:41.000Z to 2021-03-15T16:11:41.000Z) | repo beasteers/pformat @ 00b58a3e1ea7ff127295e1f70f496d7398ddab24 | licenses ["MIT"] | path pformat/__init__.py
max_issues: count null (no event dates) | repo beasteers/pformat @ 00b58a3e1ea7ff127295e1f70f496d7398ddab24 | licenses ["MIT"] | path pformat/__init__.py
max_forks: count null (no event dates) | repo beasteers/pformat @ 00b58a3e1ea7ff127295e1f70f496d7398ddab24 | licenses ["MIT"] | path pformat/__init__.py
content:
'''Partial formatting, and more
'''
from . import core
# name shortcuts
PARTIAL = core.PartialFormatter
GLOB = core.GlobFormatter
REGEX = core.RegexFormatter
DEFAULT = core.DefaultFormatter
# Initialize default formatters
pformat = core.PartialFormatter().format
gformat = core.GlobFormatter().format
reformat = core.RegexFormatter().format
dformat = core.DefaultFormatter().format
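A hedged example of the shortcuts exported above, assuming PartialFormatter follows the usual meaning of partial formatting and leaves fields without a value untouched.

template = "{root}/{name}.{ext}"

# Only `name` is supplied; under the assumed semantics the other fields survive as-is.
print(pformat(template, name="report"))   # expected: "{root}/report.{ext}"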
avg_line_length: 20.368421 | max_line_length: 40 | alphanum_fraction: 0.78553

hexsha: a95ac7464261093eaf3c1ded292bf0c3dce31471 | size: 596 | ext: py | lang: Python
max_stars: count 1 (2021-03-10T02:48:39.000Z to 2021-03-10T02:48:39.000Z) | repo tintindas/leetcode-solutions @ eb97254dafddffccbce048ef04aea1e934277282 | licenses ["MIT"] | path Tree/Easy/112. Path Sum/solution.py
max_issues: count null (no event dates) | repo tintindas/leetcode-solutions @ eb97254dafddffccbce048ef04aea1e934277282 | licenses ["MIT"] | path Tree/Easy/112. Path Sum/solution.py
max_forks: count 1 (2021-03-18T08:22:29.000Z to 2021-03-18T08:22:29.000Z) | repo tintindas/leetcode-solutions @ eb97254dafddffccbce048ef04aea1e934277282 | licenses ["MIT"] | path Tree/Easy/112. Path Sum/solution.py
content:
# Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def hasPathSum(self, root: TreeNode, targetSum: int) -> bool:
if not root:
return False
if root.left == None and root.right == None:
if root.val == targetSum:
return True
else:
return False
return self.hasPathSum(root.left, targetSum - root.val) or self.hasPathSum(root.right, targetSum - root.val)
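A quick check of the solution above on the classic LeetCode example tree (root 5, target sum 22).

#            5
#           / \
#          4   8
#         /   / \
#        11  13  4
#       /  \      \
#      7    2      1
root = TreeNode(5,
                TreeNode(4, TreeNode(11, TreeNode(7), TreeNode(2))),
                TreeNode(8, TreeNode(13), TreeNode(4, right=TreeNode(1))))

print(Solution().hasPathSum(root, 22))  # True: 5 -> 4 -> 11 -> 2
print(Solution().hasPathSum(root, 10))  # False: no root-to-leaf path sums to 10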
avg_line_length: 29.8 | max_line_length: 116 | alphanum_fraction: 0.58557

hexsha: 9b582a73decb0ad26e11b6a0fdce751076f63f0e | size: 3,211 | ext: py | lang: Python
max_stars: count null (no event dates) | repo bryli/SpeedTrig @ 5b5200ddaa39b3ce18ba77b5c4e7f77efd904b17 | licenses ["MIT"] | path SpeedTrig.py
max_issues: count 1 (2020-01-20T16:33:34.000Z to 2020-01-20T16:33:34.000Z) | repo bryli/SpeedTrig @ 5b5200ddaa39b3ce18ba77b5c4e7f77efd904b17 | licenses ["MIT"] | path SpeedTrig.py
max_forks: count null (no event dates) | repo bryli/SpeedTrig @ 5b5200ddaa39b3ce18ba77b5c4e7f77efd904b17 | licenses ["MIT"] | path SpeedTrig.py
content:
from flask import Flask, request, render_template, send_file
from io import BytesIO
import TrigGen
from datetime import datetime
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def home():
return render_template('index.html')
@app.route('/generated-quiz', methods=['GET', 'POST'])
def generated():
### Title is a string used to create worksheet title text.
if "title" in request.form:
title = request.form["title"]
else:
title = "Speed Trig Quiz"
print(title)
### norm is a Boolean for whether the Normal Trig Functions option was selected.
# True if selected
# False if not selected
norm = "norm" in request.form
print(norm)
### reci is a Boolean for whether the Reciprocal Trig Functions option was selected.
# True if selected
# False if not selected
reci = "reci" in request.form
print(reci)
### invnorm is a Boolean for whether the Inverse Normal Trig Functions option was selected.
# True if selected
# False if not selected
invnorm = "invnorm" in request.form
print(invnorm)
### invreci is a Boolean for whether the Inverse Reciprocal Trig Functions option was selected.
# True if selected
# False if not selected
invreci = "invreci" in request.form
print(invreci)
### inc is a Boolean for whether the user wants values above 2π or below 0.
# True if selected
# False if not selected
if "inc" in request.form:
inc = True if request.form["inc"] == "yes" else False
else:
inc = False
print(inc)
timesNewRoman = "timesnewroman" in request.form
### override is a Boolean for whether the user wants exact number or percent chance.
# True if % chance
# False if exact number wanted
if "override" in request.form:
override = True if request.form["override"] == "yes" else False
else:
override = False
print(override)
if "num" in request.form and "chance" in request.form:
num = int(request.form["chance"]) if override else int(request.form["num"])
elif "num" in request.form:
num = int(request.form["num"])
override = False
elif "chance" in request.form:
num = int(request.form["chance"])
override = True
else:
num = 0
print(num)
dl = "dl" in request.form
if app.config['TESTING']:
quiz = TrigGen.test_tex([norm, reci, invnorm, invreci], inc, num, timesNewRoman, override)
if quiz == ('', 204):
return ('', 204)
return send_file(BytesIO(quiz), as_attachment=dl, mimetype="text/x-tex",
attachment_filename="Speed Trig Quiz"+datetime.now().strftime(" %Y-%m-%d at %H.%M.%S.pdf"))
quiz = TrigGen.create_tex(title, [norm, reci, invnorm, invreci], inc, num, timesNewRoman, override)
if quiz == ('', 204):
return ('', 204)
return send_file(BytesIO(bytes(quiz)),
mimetype="application/pdf", as_attachment=dl,
attachment_filename="Speed Trig Quiz"+datetime.now().strftime(" %Y-%m-%d at %H.%M.%S.pdf"))
if __name__ == '__main__':
app.run(host='0.0.0.0', threaded=True, debug=True)
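A hedged example of requesting a quiz from the running app with the requests library; the field names mirror the form keys read above, and the local URL assumes the default Flask development server port.

import requests

resp = requests.post(
    "http://127.0.0.1:5000/generated-quiz",
    data={"title": "Speed Trig Quiz", "norm": "on", "reci": "on",
          "inc": "yes", "num": "20", "dl": "on"},
)
with open("quiz.pdf", "wb") as f:
    f.write(resp.content)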
avg_line_length: 34.159574 | max_line_length: 124 | alphanum_fraction: 0.632513

hexsha: c4aa26015dffff7adbb4f6d69ec96e015e854359 | size: 758 | ext: py | lang: Python
max_stars: count 6 (2019-01-29T05:58:37.000Z to 2021-11-02T22:47:02.000Z) | repo Lend88/libresign @ 9537f39a696fa5f3433052406329d77d528b6cf9 | licenses ["MIT"] | path backend/swagger_server/helpers/_verify.py
max_issues: count 9 (2020-09-09T04:53:01.000Z to 2022-03-08T22:52:18.000Z) | repo Lend88/libresign @ 9537f39a696fa5f3433052406329d77d528b6cf9 | licenses ["MIT"] | path backend/swagger_server/helpers/_verify.py
max_forks: count 4 (2019-01-29T07:38:55.000Z to 2021-10-16T21:06:42.000Z) | repo Lend88/libresign @ 9537f39a696fa5f3433052406329d77d528b6cf9 | licenses ["MIT"] | path backend/swagger_server/helpers/_verify.py
content:
from uuid import UUID
from flask_jwt_extended import get_jwt_identity
from ..mappings import Document, Field
def verify_permission(session, doc_id, signer_accessible=True):
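    # Returns True when the authenticated JWT user owns the document, or (when
    # signer_accessible is True) has at least one field on it to sign.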
if not isinstance(doc_id, UUID):
raise ValueError("doc_id should be a UUID object")
uid = UUID(hex=get_jwt_identity())
owner = (
session
.query(Document)
.filter(Document.id == doc_id.bytes)
.filter(Document.user_id == uid.bytes)
.first()
is not None
)
return owner or (signer_accessible and (
session
.query(Field)
.filter(Field.document_id == doc_id.bytes)
.filter(Field.user_id == uid.bytes)
.first()
is not None
))
| 25.266667
| 63
| 0.601583
|
8d1b023e84af62363b2d34c29aaebd114f241990
| 3,356
|
py
|
Python
|
ngram_visualization.py
|
ABerry057/js-sotf
|
36ba930629595d2ec35da3aaeff982e50c7be008
|
[
"MIT"
] | null | null | null |
ngram_visualization.py
|
ABerry057/js-sotf
|
36ba930629595d2ec35da3aaeff982e50c7be008
|
[
"MIT"
] | null | null | null |
ngram_visualization.py
|
ABerry057/js-sotf
|
36ba930629595d2ec35da3aaeff982e50c7be008
|
[
"MIT"
] | null | null | null |
import pandas as pd
from tqdm import tqdm
from pathlib import Path
from unigram_analysis import *
import plotly.express as px
file_path = Path(__file__).resolve() # get path of this file
base_dir = file_path.parents[0] # get path of parent directory
unigram_dir = base_dir / 'ngram1' # get path to ngram1 subdir
figure_dir = base_dir / 'figures'
def unigram_by_years(years, atype, reference_df):
"""Given a range of years, returns a dataframe of unigrams and
    associated counts from articles published in those years
Arguments:
years {Range} -- For AJS data set, valid ranges of years
are between 1980 and 2014.
atype {String} -- 'research-article' or 'book-review'
reference_df {Pandas Dataframe} -- a dataframe with
'id' and 'year' columns.
"""
a_ids = []
for year in years:
ids = reference_df.loc[(reference_df['year'] == year) & (reference_df['type'] == atype)]['id'].values.tolist()
a_ids += ids
if a_ids == []:
raise ValueError("No articles for given range")
# a_ids = [id for sublist in a_ids for id in sublist] # flatten list of lists into 1D
# print(f"a_ids is: {a_ids}") # for debugging
raw_table = make_unigram_table(atype, a_ids)
lemmatized = lemmatize_table(raw_table)
no_num = remove_numerals(lemmatized)
no_stops = remove_stopwords(no_num, True)
return no_stops
def stringify_span(range):
"""Returns a nicely-formatted string representing a span of years.
Arguments:
range {range} - A range object
"""
if len(range) >= 2:
timespan = f"{range[0]}-{range[-1]}"
else:
timespan = range[0]
return timespan
def make_top_unigram_chart(reference_df, n, years, atype, plot_lib='px'):
"""Makes a barchart showing the top n most common unigrams
from unigram_df.
Arguments:
reference_df {Pandas Dataframe} -- a dataframe with
'id' and 'year' columns.
n {Integer} -- number of unigrams to include in the chart
        years {Range} -- Range of years to include in the chart title.
For AJS data set, valid years are 1980 - 2014.
atype {String} -- 'research-article' or 'book-review' to
include in chart title
Keyword Arguments:
plot_lib {String} -- plotting library to use, valid choices
are 'px' and 'sns'. By default, 'px'
"""
unigram_df = unigram_by_years(years, atype, reference_df)
if atype == "book-review":
atype = "Book Reviews"
elif atype == "research-article":
atype = "Research Articles"
timespan = stringify_span(years)
top_n = unigram_df.nlargest(n, "count").sort_values(by='count', ascending=True)
fig = px.bar(top_n, x="count", y="word", text="count", orientation="h",
color_discrete_sequence=["black"],
title=f"Top {n} Unigram Counts from AJS {atype}: {timespan}",
labels={"count": "Count",
"word": "Word"
}
)
fig.update_xaxes(showticklabels=False)
fig.update_traces(textposition='outside')
fig.update_layout(uniformtext_minsize=24,
uniformtext_mode='hide',
margin=dict(l=20, r=20, t=40, b=20))
fig.layout.template = "plotly_white"
return fig
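# A minimal usage sketch (hypothetical file name; assumes a reference dataframe
# with 'id', 'year' and 'type' columns as described in the docstrings above):
#   reference_df = pd.read_csv(base_dir / 'article_metadata.csv')
#   fig = make_top_unigram_chart(reference_df, 10, range(1980, 1990), 'research-article')
#   fig.show()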
| 36.879121
| 118
| 0.629023
|
4cd356491a1529b6cf558e276a946ee9a63bce19
| 3,715
|
py
|
Python
|
app/controllers/admin/recipe.py
|
fredsonchaves07/ruifma
|
197222a16d475586e7f4674be0c2e0f36dccd5ae
|
[
"MIT"
] | null | null | null |
app/controllers/admin/recipe.py
|
fredsonchaves07/ruifma
|
197222a16d475586e7f4674be0c2e0f36dccd5ae
|
[
"MIT"
] | null | null | null |
app/controllers/admin/recipe.py
|
fredsonchaves07/ruifma
|
197222a16d475586e7f4674be0c2e0f36dccd5ae
|
[
"MIT"
] | null | null | null |
from app.dao import chef as chef_dao
from app.dao import recipe as recipe_dao
from app.controllers.admin import file as file_controller
from datetime import datetime
def list_recipes():
recipes = recipe_dao.all_recipes()
for recipe in recipes:
recipe_files_img = recipe_dao.find_recipe_file(recipe.id)
if recipe_files_img:
recipe.recipe_img = recipe_files_img[0].file.name
return recipes
def list_chef_recipe():
chefs = chef_dao.all_chef()
return [(chef[0], chef[2]) for chef in chefs]
def create_recipe(form, file):
recipe_name = form.name.data
recipe_chef = form.chef.data
recipe_date = form.date.data
recipe_ingredients = form.ingredients.data
recipe_preparations = form.preparations.data
recipe_adicional_information = form.adicional_information.data
recipe_id = recipe_dao.create_recipe(name=recipe_name,
recipe_date = recipe_date,
chef_id=recipe_chef,
ingredients=recipe_ingredients,
preparations=recipe_preparations,
adicional_information=recipe_adicional_information)
for recipe_img in file.getlist('recipe_img'):
file_id = file_controller.create_file(recipe_img)
recipe_dao.create_recipe_file(recipe_id=recipe_id, file_id=file_id)
return recipe_id
def show_recipe(recipe_id):
files = []
recipe = recipe_dao.find_recipe(recipe_id)
chef = chef_dao.find_chef(recipe.chef_id)
recipe_files = recipe_dao.find_recipe_file(recipe_id)
recipe.chef_name = chef.name
for recipe_file in recipe_files:
file = file_controller.find_file(recipe_file.file_id)
files.append(file)
recipe.recipe_img = files
return recipe
def edit_recipe(recipe_id, files, removed_files, form):
recipe_name = form.name.data
recipe_date = form.date.data
recipe_chef = form.chef.data
recipe_ingredients = form.ingredients.data
recipe_preparations = form.preparations.data
recipe_adicional_information = form.adicional_information.data
remove_files_id = removed_files.split(',')
for recipe_img in files.getlist('recipe_img'):
if recipe_img.filename:
file_id = file_controller.create_file(recipe_img)
recipe_dao.create_recipe_file(recipe_id=recipe_id, file_id=file_id)
if removed_files:
for file_id in remove_files_id:
recipe_dao.remove_recipe_file(recipe_id=recipe_id, file_id=file_id)
return recipe_dao.update_recipe(recipe_id=recipe_id,
name=recipe_name,
date=recipe_date,
chef_id=recipe_chef,
ingredients=recipe_ingredients,
preparations=recipe_preparations,
adicional_information=recipe_adicional_information)
def delete_recipe(recipe_id):
recipe = show_recipe(recipe_id)
list_recipe_imgs = recipe.recipe_img
for file in list_recipe_imgs:
recipe_dao.remove_recipe_file(recipe_id=recipe_id, file_id=file.id)
file_controller.remove_file(file)
recipe_dao.delete_recipe(recipe)
def filter_recipe(data_filter):
recipes = recipe_dao.filter_recipe(data_filter)
for recipe in recipes:
recipe_files_img = recipe_dao.find_recipe_file(recipe.id)
if recipe_files_img:
recipe.recipe_img = recipe_files_img[0].file.name
return recipes
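# A minimal usage sketch (hypothetical route code; assumes a WTForms-style form
# object and Flask's request.files, matching how the helpers above read them):
#   recipe_id = create_recipe(form, request.files)
#   recipe = show_recipe(recipe_id)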
| 33.468468
| 92
| 0.656528
|
8977d735327b63eff0019c61a5aa6e5b35a27132
| 9,216
|
py
|
Python
|
ckan/logic/auth/create.py
|
dbca-wa/ckan
|
922d0eb168e1dfd7a2a513f5be5b661b15b9ba08
|
[
"Apache-2.0"
] | null | null | null |
ckan/logic/auth/create.py
|
dbca-wa/ckan
|
922d0eb168e1dfd7a2a513f5be5b661b15b9ba08
|
[
"Apache-2.0"
] | null | null | null |
ckan/logic/auth/create.py
|
dbca-wa/ckan
|
922d0eb168e1dfd7a2a513f5be5b661b15b9ba08
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
import ckan.logic as logic
import ckan.authz as authz
import ckan.logic.auth as logic_auth
from ckan.common import _
@logic.auth_allow_anonymous_access
def package_create(context, data_dict=None):
user = context['user']
if authz.auth_is_anon_user(context):
check1 = all(authz.check_config_permission(p) for p in (
'anon_create_dataset',
'create_dataset_if_not_in_organization',
'create_unowned_dataset',
))
else:
check1 = all(authz.check_config_permission(p) for p in (
'create_dataset_if_not_in_organization',
'create_unowned_dataset',
)) or authz.has_user_permission_for_some_org(
user, 'create_dataset')
if not check1:
return {'success': False, 'msg': _('User %s not authorized to create packages') % user}
check2 = _check_group_auth(context,data_dict)
if not check2:
return {'success': False, 'msg': _('User %s not authorized to edit these groups') % user}
# If an organization is given are we able to add a dataset to it?
data_dict = data_dict or {}
org_id = data_dict.get('owner_org')
if org_id and not authz.has_user_permission_for_group_or_org(
org_id, user, 'create_dataset'):
return {'success': False, 'msg': _('User %s not authorized to add dataset to this organization') % user}
return {'success': True}
def file_upload(context, data_dict=None):
user = context['user']
if authz.auth_is_anon_user(context):
return {'success': False, 'msg': _('User %s not authorized to create packages') % user}
return {'success': True}
def resource_create(context, data_dict):
model = context['model']
user = context.get('user')
package_id = data_dict.get('package_id')
if not package_id and data_dict.get('id'):
# This can happen when auth is deferred, eg from `resource_view_create`
resource = logic_auth.get_resource_object(context, data_dict)
package_id = resource.package_id
if not package_id:
raise logic.NotFound(
_('No dataset id provided, cannot check auth.')
)
# check authentication against package
pkg = model.Package.get(package_id)
if not pkg:
raise logic.NotFound(
_('No package found for this resource, cannot check auth.')
)
pkg_dict = {'id': pkg.id}
authorized = authz.is_authorized('package_update', context, pkg_dict).get('success')
if not authorized:
return {'success': False,
'msg': _('User %s not authorized to create resources on dataset %s') %
(str(user), package_id)}
else:
return {'success': True}
def resource_view_create(context, data_dict):
return authz.is_authorized('resource_create', context, {'id': data_dict['resource_id']})
def resource_create_default_resource_views(context, data_dict):
return authz.is_authorized('resource_create', context, {'id': data_dict['resource']['id']})
def package_create_default_resource_views(context, data_dict):
return authz.is_authorized('package_update', context,
data_dict['package'])
def package_relationship_create(context, data_dict):
user = context['user']
id = data_dict['subject']
id2 = data_dict['object']
# If we can update each package we can see the relationships
authorized1 = authz.is_authorized_boolean(
'package_update', context, {'id': id})
authorized2 = authz.is_authorized_boolean(
'package_update', context, {'id': id2})
    if not (authorized1 and authorized2):
return {'success': False, 'msg': _('User %s not authorized to edit these packages') % user}
else:
return {'success': True}
def group_create(context, data_dict=None):
user = context['user']
user = authz.get_user_id_for_username(user, allow_none=True)
if user and authz.check_config_permission('user_create_groups'):
return {'success': True}
return {'success': False,
'msg': _('User %s not authorized to create groups') % user}
def organization_create(context, data_dict=None):
user = context['user']
user = authz.get_user_id_for_username(user, allow_none=True)
if user and authz.check_config_permission('user_create_organizations'):
return {'success': True}
return {'success': False,
'msg': _('User %s not authorized to create organizations') % user}
def rating_create(context, data_dict):
# No authz check in the logic function
return {'success': True}
@logic.auth_allow_anonymous_access
def user_create(context, data_dict=None):
using_api = 'api_version' in context
create_user_via_api = authz.check_config_permission(
'create_user_via_api')
create_user_via_web = authz.check_config_permission(
'create_user_via_web')
if using_api and not create_user_via_api:
return {'success': False, 'msg': _('User {user} not authorized to '
'create users via the API').format(user=context.get('user'))}
if not using_api and not create_user_via_web:
return {'success': False, 'msg': _('Not authorized to '
'create users')}
return {'success': True}
def user_invite(context, data_dict):
data_dict['id'] = data_dict['group_id']
return group_member_create(context, data_dict)
def _check_group_auth(context, data_dict):
'''Has this user got update permission for all of the given groups?
If there is a package in the context then ignore that package's groups.
(owner_org is checked elsewhere.)
:returns: False if not allowed to update one (or more) of the given groups.
True otherwise. i.e. True is the default. A blank data_dict
mentions no groups, so it returns True.
'''
    # FIXME This code is shared among other logic.auth files and should be
# somewhere better
if not data_dict:
return True
model = context['model']
user = context['user']
pkg = context.get("package")
api_version = context.get('api_version') or '1'
group_blobs = data_dict.get('groups', [])
groups = set()
for group_blob in group_blobs:
# group_blob might be a dict or a group_ref
if isinstance(group_blob, dict):
# use group id by default, but we can accept name as well
id = group_blob.get('id') or group_blob.get('name')
if not id:
continue
else:
id = group_blob
grp = model.Group.get(id)
if grp is None:
raise logic.NotFound(_('Group was not found.'))
groups.add(grp)
if pkg:
pkg_groups = pkg.get_groups()
groups = groups - set(pkg_groups)
for group in groups:
if not authz.has_user_permission_for_group_or_org(group.id, user, 'manage_group'):
return False
return True
## Modifications for rest api
def package_create_rest(context, data_dict):
model = context['model']
user = context['user']
if not user:
return {'success': False, 'msg': _('Valid API key needed to create a package')}
return authz.is_authorized('package_create', context, data_dict)
def group_create_rest(context, data_dict):
model = context['model']
user = context['user']
if not user:
return {'success': False, 'msg': _('Valid API key needed to create a group')}
return authz.is_authorized('group_create', context, data_dict)
def vocabulary_create(context, data_dict):
# sysadmins only
return {'success': False}
def activity_create(context, data_dict):
# sysadmins only
return {'success': False}
def tag_create(context, data_dict):
# sysadmins only
return {'success': False}
def _group_or_org_member_create(context, data_dict):
user = context['user']
group_id = data_dict['id']
if not authz.has_user_permission_for_group_or_org(group_id, user, 'membership'):
return {'success': False, 'msg': _('User %s not authorized to add members') % user}
return {'success': True}
def organization_member_create(context, data_dict):
return _group_or_org_member_create(context, data_dict)
def group_member_create(context, data_dict):
return _group_or_org_member_create(context, data_dict)
def member_create(context, data_dict):
group = logic_auth.get_group_object(context, data_dict)
user = context['user']
# User must be able to update the group to add a member to it
permission = 'update'
# However if the user is member of group then they can add/remove datasets
if not group.is_organization and data_dict.get('object_type') == 'package':
permission = 'manage_group'
authorized = authz.has_user_permission_for_group_or_org(group.id,
user,
permission)
if not authorized:
return {'success': False,
'msg': _('User %s not authorized to edit group %s') %
(str(user), group.id)}
else:
return {'success': True}
| 34.777358
| 112
| 0.656467
|
a3c21945e0515e04e8d212b5076b6d0ea54186ab
| 2,856
|
py
|
Python
|
test/test_request.py
|
greysteil/gocardless-legacy-python
|
6805815c1ba726fbcb17e4083fb5ff4e08be8f86
|
[
"MIT"
] | null | null | null |
test/test_request.py
|
greysteil/gocardless-legacy-python
|
6805815c1ba726fbcb17e4083fb5ff4e08be8f86
|
[
"MIT"
] | null | null | null |
test/test_request.py
|
greysteil/gocardless-legacy-python
|
6805815c1ba726fbcb17e4083fb5ff4e08be8f86
|
[
"MIT"
] | 2
|
2017-11-10T20:44:49.000Z
|
2021-03-17T18:24:19.000Z
|
import unittest
import mock
#from gocardless import request
import gocardless.request
class RequestTestCase(unittest.TestCase):
def setUp(self):
self.request = gocardless.request.Request('get', 'http://test.com')
def test_valid_method_allows_valid_methods(self):
for method in ('get', 'post', 'put'):
            self.assertTrue(self.request._valid_method(method))
def test_valid_method_disallows_invalid_methods(self):
self.assertFalse(self.request._valid_method('fake_method'))
def test_use_bearer_auth_sets_auth_header(self):
self.request.use_bearer_auth('token')
self.assertEqual(self.request._opts['headers']['Authorization'],
'bearer token')
def test_use_http_auth_sets_auth_details_in_opts(self):
self.request.use_http_auth('user', 'pass')
self.assertEqual(self.request._opts['auth'], ('user', 'pass'))
def test_set_payload_ignores_null_payloads(self):
self.request.set_payload(None)
self.assertTrue('Content-Type' not in self.request._opts['headers'])
self.assertTrue('data' not in self.request._opts)
def test_set_payload_sets_content_type(self):
self.request.set_payload({'a': 'b'})
self.assertEqual(self.request._opts['headers']['Content-Type'],
'application/json')
def test_set_payload_encodes_payload(self):
self.request.set_payload({'a': 'b'})
self.assertEqual(self.request._opts['data'], '{"a": "b"}')
@mock.patch('gocardless.request.requests')
def test_perform_calls_get_for_gets(self, mock_requests):
mock_requests.get.return_value.content = '{"a": "b"}'
self.request.perform()
mock_requests.get.assert_called_once_with(mock.ANY, headers=mock.ANY)
@mock.patch('gocardless.request.requests')
def test_perform_passes_params_through(self, mock_requests):
params = {'x': 'y'}
request = gocardless.request.Request('get', 'http://test.com', params)
mock_requests.get.return_value.content = '{"a": "b"}'
request.perform()
mock_requests.get.assert_called_once_with(mock.ANY, headers=mock.ANY,
params=params)
@mock.patch('gocardless.request.requests')
def test_perform_calls_post_for_posts(self, mock_requests):
mock_requests.post.return_value.content = '{"a": "b"}'
self.request._method = 'post'
self.request.perform()
mock_requests.post.assert_called_once_with(mock.ANY, headers=mock.ANY)
@mock.patch('gocardless.request.requests.get')
def test_perform_decodes_json(self, mock_get):
response = mock.Mock()
response.json = lambda: {"a": "b"}
mock_get.return_value = response
self.assertEqual(self.request.perform(), {'a': 'b'})
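# One way to run just this module (assuming the repository layout above and the
# standalone `mock` package installed): python -m pytest test/test_request.py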
| 39.666667
| 78
| 0.667017
|
a912ad8a78d950da6c62375f795f95e5b1b5662a
| 1,579
|
py
|
Python
|
send.py
|
Marusoftware/Marutools
|
2b462ea02abaf957eb037c281b62d7efe053840e
|
[
"MIT"
] | null | null | null |
send.py
|
Marusoftware/Marutools
|
2b462ea02abaf957eb037c281b62d7efe053840e
|
[
"MIT"
] | 5
|
2021-01-21T09:46:12.000Z
|
2022-02-14T13:54:44.000Z
|
send.py
|
Marusoftware/Marutools
|
2b462ea02abaf957eb037c281b62d7efe053840e
|
[
"MIT"
] | 2
|
2021-11-02T11:01:53.000Z
|
2022-02-14T10:11:21.000Z
|
#! /usr/bin/python3
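# Upload client: picks a file with a GUI dialog and streams it to the Marutools
# server over a raw TCP socket, answering the server's text prompts below.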
import socket
import tkfilebrowser
import os
import time
from tkinter import simpledialog
import tkinter
tkinter.Tk().withdraw()
##name=simpledialog.askstring("name","name:")
##password=simpledialog.askstring("password","password:")
up_pbyte = 1024
file_name=tkfilebrowser.askopenfilename()
file_size = os.path.getsize(file_name)
version="b1.1"
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print("connecting...",end='')
s.connect(('192.168.1.14', 50001))
print("connected")
s.send(b'upload_program')
while 1:
data = s.recv(1024)
    print('Client received data: '+str(data))
## if data == b"name:":
## s.send(bytes("name="+str(name),"utf-8"))
## elif data == b'password:':
## s.send(bytes("password="+password,"utf-8"))
if data == b'file_name:':
s.send(bytes("file_name=maruediter/"+version+"/"+os.path.basename(file_name),"utf-8"))
elif data == b'upload OK':
while data != b'upload START':
data = s.recv(1024)
if data == b'file_size:':
s.send(int(file_size).to_bytes(10, 'big'))
elif data == b'speed:':
s.send(int(up_pbyte).to_bytes(2, 'big'))
else:
break
s.settimeout(None)
f = open(file_name, "rb")
uploaded = 0
print("uploading...", end="")
while 1:
s.send(f.read(up_pbyte))
uploaded = uploaded + up_pbyte
if file_size <= uploaded:
break
break
else:
pass
print("completed!")
s.close()
| 29.792453
| 94
| 0.587714
|
7b8afbf5cbc5aa042cbf8af9bac429786da4d757
| 1,123
|
py
|
Python
|
module1-introduction-to-sql/buddymove_holidayiq.py
|
JohnMorrisonn/DS-Unit-3-Sprint-2-SQL-and-Databases
|
b924d0f37e6bcc191cdb0a448fd36d379a136c5f
|
[
"MIT"
] | null | null | null |
module1-introduction-to-sql/buddymove_holidayiq.py
|
JohnMorrisonn/DS-Unit-3-Sprint-2-SQL-and-Databases
|
b924d0f37e6bcc191cdb0a448fd36d379a136c5f
|
[
"MIT"
] | null | null | null |
module1-introduction-to-sql/buddymove_holidayiq.py
|
JohnMorrisonn/DS-Unit-3-Sprint-2-SQL-and-Databases
|
b924d0f37e6bcc191cdb0a448fd36d379a136c5f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# ## Load in the buddy dataset
# In[2]:
import pandas as pd
import sqlite3
# In[3]:
# Load in the dataset
buddy = pd.read_csv('buddymove_holidayiq.csv')
buddy.shape
# In[4]:
# Create an SQL connection to a blank dataset
conn = sqlite3.connect('buddymove_holidayiq.sqlite3')
# Put buddy csv into that SQL connection
buddy.to_sql('buddy', con=conn)
# In[5]:
# Create the connection cursor and check that it has the same number of rows
c = conn.cursor()
query = 'SELECT Count(*) FROM buddy'
c.execute(query).fetchall()
# In[6]:
# Take a further look into the data
query = 'SELECT * FROM buddy'
df = pd.read_sql(query, conn)
df
# ### How many users reviewed at least 100 in Nature and 100 in Shopping?
# In[12]:
query = '''SELECT COUNT('User Id') FROM buddy WHERE (Nature >= 100) & (Shopping >= 100)
'''
c.execute(query).fetchone()
# ### What are the average number of reviews for each category?
# In[33]:
query = '''SELECT AVG(Sports), AVG(Religious), AVG(Nature), AVG(Theatre), AVG(Shopping), AVG(Picnic) FROM buddy
'''
c.execute(query).fetchall()
| 14.973333
| 111
| 0.676759
|
2ad510840393706312e8a9042d8e681adabb92dd
| 5,918
|
py
|
Python
|
research/cv/3dcnn/train.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 77
|
2021-10-15T08:32:37.000Z
|
2022-03-30T13:09:11.000Z
|
research/cv/3dcnn/train.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 3
|
2021-10-30T14:44:57.000Z
|
2022-02-14T06:57:57.000Z
|
research/cv/3dcnn/train.py
|
leelige/mindspore
|
5199e05ba3888963473f2b07da3f7bca5b9ef6dc
|
[
"Apache-2.0"
] | 24
|
2021-10-15T08:32:45.000Z
|
2022-03-24T18:45:20.000Z
|
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
python train.py
"""
import os
import random
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import context
from mindspore.common import set_seed
import mindspore.common.dtype as mstype
from mindspore.train.model import Model, ParallelMode
from mindspore.communication.management import init, get_rank, get_group_size
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
from mindspore.train.loss_scale_manager import FixedLossScaleManager
from src.models import Dense24
from src.lr_schedule import dynamic_lr
from src.dataset import create_dataset
from src.loss import NetWithLoss
from src.config import config
if config.isModelArts:
import moxing as mox
random.seed(1)
set_seed(1)
if __name__ == '__main__':
target = config.device_target
# init context
context.set_context(mode=context.GRAPH_MODE, device_target=target, save_graphs=False)
if config.run_distribute:
if target == "Ascend":
device_id = int(os.getenv('DEVICE_ID'))
context.set_context(device_id=device_id)
group_size = 8
context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,
gradients_mean=False)
init()
else:
# target == "GPU"
init()
device_id = get_rank()
group_size = get_group_size()
context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,
device_num=group_size,
gradients_mean=False)
else:
if target == "Ascend":
device_id = int(os.getenv('DEVICE_ID', '0'))
context.set_context(device_id=device_id)
else:
# target == "GPU"
device_id = int(config.device_id)
context.set_context(device_id=device_id)
group_size = 1
if config.isModelArts:
mox.file.copy_parallel(src_url=config.data_url, dst_url='/cache/dataset/device_{}'.format(device_id))
train_dataset_path = '/cache/dataset/device_{}'.format(device_id)
order = 'cd ' + train_dataset_path + ';'
order = order + 'tar -xzf MICCAI_BraTS17_Data_Training.tar.gz' + ';'
order = order + 'cd ../../../'
os.system(order)
train_dataset_path = os.path.join(train_dataset_path, "MICCAI_BraTS17_Data_Training/HGG")
else:
train_dataset_path = config.data_path
# create dataset
train_dataset = create_dataset(train_dataset_path, config.train_path, config.height_size, config.width_size,
config.channel_size, config.pred_size, config.batch_size, config.correction,
target=target, mindrecord_path=config.mindrecord_path,
use_mindrecord=config.use_mindrecord, group_size=group_size,
device_id=device_id)
train_data_size = train_dataset.get_dataset_size()
# create network
network = Dense24(config.num_classes)
net_with_loss = NetWithLoss(network, config.num_classes)
network.set_train(True)
rank_size = int(os.getenv("RANK_SIZE", "1"))
if config.use_dynamic_lr:
lr = Tensor(dynamic_lr(config, train_data_size, rank_size), mstype.float32)
else:
lr = Tensor(float(config.lr), mstype.float32)
if config.use_loss_scale:
loss_scale = config.loss_scale
scale_manager = FixedLossScaleManager(loss_scale=loss_scale, drop_overflow_update=True)
else:
scale_manager = None
loss_scale = 1.0
if config.use_optimizer == "SGD":
optimizer = nn.SGD(params=network.trainable_params(), learning_rate=lr, momentum=config.momentum,
weight_decay=config.weight_decay, nesterov=True)
elif config.use_optimizer == "Adam":
optimizer = nn.Adam(params=network.trainable_params(),
learning_rate=lr,
loss_scale=loss_scale)
model = Model(net_with_loss, optimizer=optimizer, loss_scale_manager=scale_manager)
# save checkpoint
time_cb = TimeMonitor(data_size=train_data_size)
loss_cb = LossMonitor()
ckpt_config = CheckpointConfig(save_checkpoint_steps=train_data_size,
keep_checkpoint_max=config.keep_checkpoint_max)
if config.isModelArts:
save_checkpoint_path = '/cache/train_output/device_{}/'.format(device_id)
else:
save_checkpoint_path = './result/ckpt_{}/'.format(device_id)
ckpoint_cb = ModelCheckpoint(prefix='{}'.format(config.model),
directory=save_checkpoint_path,
config=ckpt_config)
callbacks_list = [loss_cb, time_cb, ckpoint_cb]
print("============== Starting Training ==============")
model.train(config.epoch_size, train_dataset, callbacks=callbacks_list, dataset_sink_mode=config.dataset_sink_mode)
if config.isModelArts:
mox.file.copy_parallel(src_url='/cache/train_output', dst_url=config.train_url)
print("============== End Training ==============")
| 41.097222
| 119
| 0.654444
|
ca25c65333e9c220e666a2bb5413d22f1765ecb3
| 861
|
py
|
Python
|
special-palindrome-again/main.py
|
joaojunior/hackerrank
|
a5ee0449e791535930b8659dfb7dddcf9e1237de
|
[
"MIT"
] | null | null | null |
special-palindrome-again/main.py
|
joaojunior/hackerrank
|
a5ee0449e791535930b8659dfb7dddcf9e1237de
|
[
"MIT"
] | null | null | null |
special-palindrome-again/main.py
|
joaojunior/hackerrank
|
a5ee0449e791535930b8659dfb7dddcf9e1237de
|
[
"MIT"
] | 1
|
2019-06-19T00:51:02.000Z
|
2019-06-19T00:51:02.000Z
|
def _generate_substring(s):
size = len(s)
i = 0
while i < size:
j = i + 1
while j <= size:
yield s[i:j]
j += 1
i += 1
def _is_palindrome(s, memoize={}):
if s in memoize:
return memoize[s]
size = len(s)
mid = size // 2
result = False
if size == 1:
result = True
else:
count = 1
c = s[0]
i = 0
while i < size and count == 1:
if s[i] != c:
count += 1
i += 1
if count == 1:
result = True
else:
            # every character outside the middle position must also be identical
            result = s[0:mid] == s[mid+1:size] and len(set(s[0:mid])) == 1
memoize[s] = result
return result
def substr_count(s):
count = 0
memoize = {}
for substr in _generate_substring(s):
if _is_palindrome(substr, memoize):
count += 1
return count
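# Worked example: substr_count("asasd") == 7 -- the five single characters plus
# the substrings "asa" and "sas", whose outer characters match around a
# different middle character.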
| 20.023256
| 46
| 0.445993
|
c0866c1069ac7f7e25cbd12cb5a490e2ed5e4bec
| 43,598
|
py
|
Python
|
tensorflow/python/grappler/hierarchical_controller.py
|
tianyapiaozi/tensorflow
|
fb3ce0467766a8e91f1da0ad7ada7c24fde7a73a
|
[
"Apache-2.0"
] | 71
|
2017-05-25T16:02:15.000Z
|
2021-06-09T16:08:08.000Z
|
tensorflow/python/grappler/hierarchical_controller.py
|
shrikunjsarda/tensorflow
|
7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae
|
[
"Apache-2.0"
] | 133
|
2017-04-26T16:49:49.000Z
|
2019-10-15T11:39:26.000Z
|
tensorflow/python/grappler/hierarchical_controller.py
|
shrikunjsarda/tensorflow
|
7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae
|
[
"Apache-2.0"
] | 31
|
2018-09-11T02:17:17.000Z
|
2021-12-15T10:33:35.000Z
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""HierarchicalController Class.
The HierarchicalController encompasses the entire lifecycle of training the
device placement policy, including generating op embeddings, getting groups for
each op, placing those groups and running the predicted placements.
Different assignment models can inherit from this class.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
import six
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops as tf_ops
from tensorflow.python.grappler.controller import Controller
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.summary import summary
from tensorflow.python.training import adam
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import learning_rate_decay
from tensorflow.python.training import training_util
class PlacerParams(object):
"""Class to hold a set of placement parameters as name-value pairs.
A typical usage is as follows:
```python
# Create a PlacerParams object specifying names and values of the model
# parameters:
params = PlacerParams(hidden_size=128, decay_steps=50)
# The parameters are available as attributes of the PlacerParams object:
  params.hidden_size ==> 128
  params.decay_steps ==> 50
```
"""
def __init__(self, **kwargs):
"""Create an instance of `PlacerParams` from keyword arguments.
The keyword arguments specify name-values pairs for the parameters.
The parameter types are inferred from the type of the values passed.
The parameter names are added as attributes of `PlacerParams` object,
and they can be accessed directly with the dot notation `params._name_`.
Example:
```python
# Define 1 parameter: 'hidden_size'
params = PlacerParams(hidden_size=128)
params.hidden_size ==> 128
```
Args:
**kwargs: Key-value pairs where the key is the parameter name and
the value is the value for the parameter.
"""
for name, value in six.iteritems(kwargs):
self.add_param(name, value)
def add_param(self, name, value):
"""Adds {name, value} pair to hyperparameters.
Args:
name: Name of the hyperparameter.
value: Value of the hyperparameter. Can be one of the following types:
int, float, string, int list, float list, or string list.
Raises:
ValueError: if one of the arguments is invalid.
"""
# Keys in kwargs are unique, but 'name' could be the name of a pre-existing
# attribute of this object. In that case we refuse to use it as a
# parameter name.
if getattr(self, name, None) is not None:
raise ValueError("Parameter name is reserved: %s" % name)
setattr(self, name, value)
def hierarchical_controller_hparams():
"""Hyperparameters for hierarchical planner."""
return PlacerParams(
hidden_size=512,
forget_bias_init=1.0,
temperature=1.0,
logits_std_noise=0.5,
stop_noise_step=750,
decay_steps=50,
max_num_outputs=5,
max_output_size=5,
tanh_constant=1.0,
adj_embed_dim=20,
grouping_hidden_size=64,
num_groups=None,
bi_lstm=True,
failing_signal=100,
stop_sampling=500,
start_with_failing_signal=True,
always_update_baseline=False,
bl_dec=0.9,
grad_bound=1.0,
lr=0.1,
lr_dec=0.95,
start_decay_step=400,
optimizer_type="adam",
stop_updating_after_steps=1000,
name="hierarchical_controller",
keep_prob=1.0,
reward_function="sqrt",
seed=1234,
# distributed training params
num_children=1)
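# For example (hypothetical usage), the defaults above can be fetched and
# individual values overridden before constructing a controller:
#   hparams = hierarchical_controller_hparams()
#   hparams.num_children = 4
#   controller = HierarchicalController(hparams, item, cluster)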
class HierarchicalController(Controller):
"""HierarchicalController class."""
def __init__(self, hparams, item, cluster, controller_id=0):
"""HierarchicalController class initializer.
Args:
hparams: All hyper-parameters.
item: The metagraph to place.
cluster: The cluster of hardware devices to optimize for.
controller_id: the id of the controller in a multi-controller setup.
"""
super(HierarchicalController, self).__init__(item, cluster)
self.ctrl_id = controller_id
self.hparams = hparams
if self.hparams.num_groups is None:
self.num_groups = min(256, 20 * self.num_devices)
else:
self.num_groups = self.hparams.num_groups
# creates self.op_embeddings and self.type_dict
self.create_op_embeddings(verbose=False)
# TODO(azalia) clean up embedding/group_embedding_size names
self.group_emb_size = (
2 * self.num_groups + len(self.type_dict) +
self.hparams.max_num_outputs * self.hparams.max_output_size)
self.embedding_size = self.group_emb_size
self.initializer = init_ops.glorot_uniform_initializer(
seed=self.hparams.seed)
with variable_scope.variable_scope(
self.hparams.name,
initializer=self.initializer,
reuse=variable_scope.AUTO_REUSE):
# define parameters of feedforward
variable_scope.get_variable("w_grouping_ff", [
1 + self.hparams.max_num_outputs * self.hparams.max_output_size +
self.hparams.adj_embed_dim, self.hparams.grouping_hidden_size
])
variable_scope.get_variable(
"w_grouping_softmax",
[self.hparams.grouping_hidden_size, self.num_groups])
if self.hparams.bi_lstm:
variable_scope.get_variable("encoder_lstm_forward", [
self.embedding_size + self.hparams.hidden_size / 2,
2 * self.hparams.hidden_size
])
variable_scope.get_variable("encoder_lstm_backward", [
self.embedding_size + self.hparams.hidden_size / 2,
2 * self.hparams.hidden_size
])
variable_scope.get_variable(
"device_embeddings", [self.num_devices, self.hparams.hidden_size])
variable_scope.get_variable(
"decoder_lstm",
[2 * self.hparams.hidden_size, 4 * self.hparams.hidden_size])
variable_scope.get_variable(
"device_softmax", [2 * self.hparams.hidden_size, self.num_devices])
variable_scope.get_variable("device_go_embedding",
[1, self.hparams.hidden_size])
variable_scope.get_variable(
"encoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"decoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"attn_w_1", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable(
"attn_w_2", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable("attn_v", [self.hparams.hidden_size, 1])
else:
variable_scope.get_variable("encoder_lstm", [
self.embedding_size + self.hparams.hidden_size,
4 * self.hparams.hidden_size
])
variable_scope.get_variable(
"device_embeddings", [self.num_devices, self.hparams.hidden_size])
variable_scope.get_variable(
"decoder_lstm",
[2 * self.hparams.hidden_size, 4 * self.hparams.hidden_size])
variable_scope.get_variable(
"device_softmax", [2 * self.hparams.hidden_size, self.num_devices])
variable_scope.get_variable("device_go_embedding",
[1, self.hparams.hidden_size])
variable_scope.get_variable(
"encoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"decoder_forget_bias",
shape=1,
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
self.hparams.forget_bias_init))
variable_scope.get_variable(
"attn_w_1", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable(
"attn_w_2", [self.hparams.hidden_size, self.hparams.hidden_size])
variable_scope.get_variable("attn_v", [self.hparams.hidden_size, 1])
seq2seq_input_layer = array_ops.placeholder_with_default(
array_ops.zeros([self.hparams.num_children,
self.num_groups,
self.group_emb_size],
dtypes.float32),
shape=(self.hparams.num_children, self.num_groups, self.group_emb_size))
self.seq2seq_input_layer = seq2seq_input_layer
def compute_reward(self, run_time):
if self.hparams.reward_function == "id":
reward = run_time
elif self.hparams.reward_function == "sqrt":
reward = math.sqrt(run_time)
elif self.hparams.reward_function == "log":
reward = math.log1p(run_time)
else:
raise NotImplementedError(
"Unrecognized reward function '%s', consider your "
"--reward_function flag value." % self.hparams.reward_function)
return reward
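  # For example, with reward_function="sqrt" a measured run_time of 4.0 yields a
  # reward of 2.0, while "log" would yield log1p(4.0), roughly 1.61.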
def build_controller(self):
"""RL optimization interface.
Returns:
ops: A dictionary holding handles of the model used for training.
"""
self._global_step = training_util.get_or_create_global_step()
ops = {}
ops["loss"] = 0
failing_signal = self.compute_reward(self.hparams.failing_signal)
ctr = {}
with tf_ops.name_scope("controller_{}".format(self.ctrl_id)):
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
ctr["reward"] = {"value": [], "ph": [], "update": []}
ctr["ready"] = {"value": [], "ph": [], "update": []}
ctr["best_reward"] = {"value": [], "update": []}
for i in range(self.hparams.num_children):
reward_value = variable_scope.get_local_variable(
"reward_{}".format(i),
initializer=0.0,
dtype=dtypes.float32,
trainable=False)
reward_ph = array_ops.placeholder(
dtypes.float32, shape=(), name="reward_ph_{}".format(i))
reward_update = state_ops.assign(
reward_value, reward_ph, use_locking=True)
ctr["reward"]["value"].append(reward_value)
ctr["reward"]["ph"].append(reward_ph)
ctr["reward"]["update"].append(reward_update)
best_reward = variable_scope.get_local_variable(
"best_reward_{}".format(i),
initializer=failing_signal,
dtype=dtypes.float32,
trainable=False)
ctr["best_reward"]["value"].append(best_reward)
ctr["best_reward"]["update"].append(
state_ops.assign(best_reward,
math_ops.minimum(best_reward, reward_update)))
ready_value = variable_scope.get_local_variable(
"ready_{}".format(i),
initializer=True,
dtype=dtypes.bool,
trainable=False)
ready_ph = array_ops.placeholder(
dtypes.bool, shape=(), name="ready_ph_{}".format(i))
ready_update = state_ops.assign(
ready_value, ready_ph, use_locking=True)
ctr["ready"]["value"].append(ready_value)
ctr["ready"]["ph"].append(ready_ph)
ctr["ready"]["update"].append(ready_update)
ctr["grouping_y_preds"], ctr["grouping_log_probs"] = self.get_groupings()
summary.histogram(
"grouping_actions",
array_ops.slice(ctr["grouping_y_preds"]["sample"], [0, 0],
[1, array_ops.shape(self.op_embeddings)[0]]))
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
ctr["baseline"] = variable_scope.get_local_variable(
"baseline",
initializer=failing_signal
if self.hparams.start_with_failing_signal else 0.0,
dtype=dtypes.float32,
trainable=False)
new_baseline = self.hparams.bl_dec * ctr["baseline"] + (
1 - self.hparams.bl_dec) * math_ops.reduce_mean(
ctr["reward"]["value"])
if not self.hparams.always_update_baseline:
baseline_mask = math_ops.less(ctr["reward"]["value"], failing_signal)
selected_reward = array_ops.boolean_mask(ctr["reward"]["value"],
baseline_mask)
selected_baseline = control_flow_ops.cond(
math_ops.reduce_any(baseline_mask),
lambda: math_ops.reduce_mean(selected_reward),
lambda: constant_op.constant(0, dtype=dtypes.float32))
ctr["pos_reward"] = selected_baseline
pos_ = math_ops.less(
constant_op.constant(0, dtype=dtypes.float32), selected_baseline)
selected_baseline = self.hparams.bl_dec * ctr["baseline"] + (
1 - self.hparams.bl_dec) * selected_baseline
selected_baseline = control_flow_ops.cond(
pos_, lambda: selected_baseline, lambda: ctr["baseline"])
new_baseline = control_flow_ops.cond(
math_ops.less(self.global_step,
self.hparams.stop_updating_after_steps),
lambda: new_baseline, lambda: selected_baseline)
ctr["baseline_update"] = state_ops.assign(
ctr["baseline"], new_baseline, use_locking=True)
ctr["y_preds"], ctr["log_probs"] = self.get_placements()
summary.histogram("actions", ctr["y_preds"]["sample"])
mask = math_ops.less(ctr["reward"]["value"], failing_signal)
ctr["loss"] = ctr["reward"]["value"] - ctr["baseline"]
ctr["loss"] *= (
ctr["log_probs"]["sample"] + ctr["grouping_log_probs"]["sample"])
selected_loss = array_ops.boolean_mask(ctr["loss"], mask)
selected_loss = control_flow_ops.cond(
math_ops.reduce_any(mask),
lambda: math_ops.reduce_mean(-selected_loss),
lambda: constant_op.constant(0, dtype=dtypes.float32))
ctr["loss"] = control_flow_ops.cond(
math_ops.less(self.global_step,
self.hparams.stop_updating_after_steps),
lambda: math_ops.reduce_mean(-ctr["loss"]), lambda: selected_loss)
ctr["reward_s"] = math_ops.reduce_mean(ctr["reward"]["value"])
summary.scalar("loss", ctr["loss"])
summary.scalar("avg_reward", ctr["reward_s"])
summary.scalar("best_reward_so_far", best_reward)
summary.scalar(
"advantage",
math_ops.reduce_mean(ctr["reward"]["value"] - ctr["baseline"]))
with variable_scope.variable_scope(
"optimizer", reuse=variable_scope.AUTO_REUSE):
(ctr["train_op"], ctr["lr"], ctr["grad_norm"],
ctr["grad_norms"]) = self._get_train_ops(
ctr["loss"],
tf_ops.get_collection(tf_ops.GraphKeys.TRAINABLE_VARIABLES),
self.global_step,
grad_bound=self.hparams.grad_bound,
lr_init=self.hparams.lr,
lr_dec=self.hparams.lr_dec,
start_decay_step=self.hparams.start_decay_step,
decay_steps=self.hparams.decay_steps,
optimizer_type=self.hparams.optimizer_type)
summary.scalar("gradnorm", ctr["grad_norm"])
summary.scalar("lr", ctr["lr"])
ctr["summary"] = summary.merge_all()
ops["controller"] = ctr
self.ops = ops
return ops
@property
def global_step(self):
return self._global_step
def create_op_embeddings(self, verbose=False):
if verbose:
print("process input graph for op embeddings")
self.num_ops = len(self.important_ops)
# topological sort of important nodes
topo_order = [op.name for op in self.important_ops]
    # create index to name for topologically sorted important nodes
name_to_topo_order_index = {}
for idx, x in enumerate(topo_order):
name_to_topo_order_index[x] = idx
self.name_to_topo_order_index = name_to_topo_order_index
# create adj matrix
adj_dict = {}
for idx, op in enumerate(self.important_ops):
for output_op in self.get_node_fanout(op):
output_op_name = output_op.name
if output_op_name in self.important_op_names:
if name_to_topo_order_index[op.name] not in adj_dict:
adj_dict[name_to_topo_order_index[op.name]] = []
adj_dict[name_to_topo_order_index[op.name]].extend(
[name_to_topo_order_index[output_op_name], 1])
if output_op_name not in adj_dict:
adj_dict[name_to_topo_order_index[output_op_name]] = []
adj_dict[name_to_topo_order_index[output_op_name]].extend(
[name_to_topo_order_index[op.name], -1])
# get op_type op_output_shape, and adj info
output_embed_dim = (self.hparams.max_num_outputs *
self.hparams.max_output_size)
# TODO(bsteiner): don't filter based on used ops so that we can generalize
# to models that use other types of ops.
used_ops = set()
for node in self.important_ops:
op_type = str(node.op)
used_ops.add(op_type)
self.type_dict = {}
for op_type in self.cluster.ListAvailableOps():
if op_type in used_ops:
self.type_dict[op_type] = len(self.type_dict)
op_types = np.zeros([self.num_ops], dtype=np.int32)
op_output_shapes = np.full(
[self.num_ops, output_embed_dim], -1.0, dtype=np.float32)
for idx, node in enumerate(self.important_ops):
op_types[idx] = self.type_dict[node.op]
# output shape
op_name = node.name
for i, output_prop in enumerate(self.node_properties[op_name]):
if output_prop.shape.__str__() == "<unknown>":
continue
shape = output_prop.shape
for j, dim in enumerate(shape.dim):
if dim.size >= 0:
if i * self.hparams.max_output_size + j >= output_embed_dim:
break
op_output_shapes[idx,
i * self.hparams.max_output_size + j] = dim.size
# adj for padding
op_adj = np.full(
[self.num_ops, self.hparams.adj_embed_dim], 0, dtype=np.float32)
for idx in adj_dict:
neighbors = adj_dict[int(idx)]
min_dim = min(self.hparams.adj_embed_dim, len(neighbors))
padding_size = self.hparams.adj_embed_dim - min_dim
neighbors = neighbors[:min_dim] + [0] * padding_size
op_adj[int(idx)] = neighbors
# op_embedding starts here
op_embeddings = np.zeros(
[
self.num_ops,
1 + self.hparams.max_num_outputs * self.hparams.max_output_size +
self.hparams.adj_embed_dim
],
dtype=np.float32)
for idx, op_name in enumerate(topo_order):
op_embeddings[idx] = np.concatenate(
(np.array([op_types[idx]]), op_output_shapes[idx], op_adj[int(idx)]))
self.op_embeddings = constant_op.constant(
op_embeddings, dtype=dtypes.float32)
if verbose:
print("num_ops = {}".format(self.num_ops))
print("num_types = {}".format(len(self.type_dict)))
def get_groupings(self, *args, **kwargs):
num_children = self.hparams.num_children
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
grouping_actions_cache = variable_scope.get_local_variable(
"grouping_actions_cache",
initializer=init_ops.zeros_initializer,
dtype=dtypes.int32,
shape=[num_children, self.num_ops],
trainable=False)
input_layer = self.op_embeddings
input_layer = array_ops.expand_dims(input_layer, 0)
feed_ff_input_layer = array_ops.tile(input_layer, [num_children, 1, 1])
grouping_actions, grouping_log_probs = {}, {}
grouping_actions["sample"], grouping_log_probs[
"sample"] = self.make_grouping_predictions(feed_ff_input_layer)
grouping_actions["sample"] = state_ops.assign(grouping_actions_cache,
grouping_actions["sample"])
self.grouping_actions_cache = grouping_actions_cache
return grouping_actions, grouping_log_probs
def make_grouping_predictions(self, input_layer, reuse=None):
"""model that predicts grouping (grouping_actions).
Args:
input_layer: group_input_layer
reuse: reuse
Returns:
grouping_actions: actions
grouping_log_probs: log probabilities corresponding to actions
"""
with variable_scope.variable_scope(self.hparams.name, reuse=True):
# input_layer: tensor of size [1, num_ops, hidden_size]
w_grouping_ff = variable_scope.get_variable("w_grouping_ff")
w_grouping_softmax = variable_scope.get_variable("w_grouping_softmax")
batch_size = array_ops.shape(input_layer)[0]
embedding_dim = array_ops.shape(input_layer)[2]
reshaped = array_ops.reshape(input_layer,
[batch_size * self.num_ops, embedding_dim])
ff_output = math_ops.matmul(reshaped, w_grouping_ff)
logits = math_ops.matmul(ff_output, w_grouping_softmax)
if self.hparams.logits_std_noise > 0:
num_in_logits = math_ops.cast(
array_ops.size(logits), dtype=dtypes.float32)
avg_norm = math_ops.divide(
linalg_ops.norm(logits), math_ops.sqrt(num_in_logits))
logits_noise = random_ops.random_normal(
array_ops.shape(logits),
stddev=self.hparams.logits_std_noise * avg_norm)
logits = control_flow_ops.cond(
self.global_step > self.hparams.stop_noise_step, lambda: logits,
lambda: logits + logits_noise)
logits = array_ops.reshape(logits,
[batch_size * self.num_ops, self.num_groups])
actions = random_ops.multinomial(logits, 1, seed=self.hparams.seed)
actions = math_ops.to_int32(actions)
actions = array_ops.reshape(actions, [batch_size, self.num_ops])
action_label = array_ops.reshape(actions, [-1])
log_probs = nn_ops.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=action_label)
log_probs = array_ops.reshape(log_probs, [batch_size, -1])
log_probs = math_ops.reduce_sum(log_probs, 1)
grouping_actions = actions
grouping_log_probs = log_probs
return grouping_actions, grouping_log_probs
def create_group_embeddings(self, grouping_actions, verbose=False):
"""Approximating the blocks of a TF graph from a graph_def.
Args:
grouping_actions: grouping predictions.
      verbose: print debug output.
Returns:
groups: list of groups.
"""
groups = [
self._create_group_embeddings(grouping_actions, i, verbose) for
i in range(self.hparams.num_children)
]
return np.stack(groups, axis=0)
def _create_group_embeddings(self, grouping_actions, child_id, verbose=False):
"""Approximating the blocks of a TF graph from a graph_def for each child.
Args:
grouping_actions: grouping predictions.
child_id: child_id for the group.
      verbose: print debug output.
Returns:
groups: group embedding for the child_id.
"""
if verbose:
print("Processing input_graph")
# TODO(azalia): Build inter-adjacencies dag matrix.
# record dag_matrix
dag_matrix = np.zeros([self.num_groups, self.num_groups], dtype=np.float32)
for op in self.important_ops:
topo_op_index = self.name_to_topo_order_index[op.name]
group_index = grouping_actions[child_id][topo_op_index]
for output_op in self.get_node_fanout(op):
if output_op.name not in self.important_op_names:
continue
output_group_index = (
grouping_actions[child_id][self.name_to_topo_order_index[
output_op.name]])
dag_matrix[group_index, output_group_index] += 1.0
num_connections = np.sum(dag_matrix)
num_intra_group_connections = dag_matrix.trace()
num_inter_group_connections = num_connections - num_intra_group_connections
if verbose:
print("grouping evaluation metric")
print(("num_connections={} num_intra_group_connections={} "
"num_inter_group_connections={}").format(
num_connections, num_intra_group_connections,
num_inter_group_connections))
self.dag_matrix = dag_matrix
# output_shape
op_output_shapes = np.zeros(
[
len(self.important_ops),
self.hparams.max_num_outputs * self.hparams.max_output_size
],
dtype=np.float32)
for idx, op in enumerate(self.important_ops):
for i, output_properties in enumerate(self.node_properties[op.name]):
if output_properties.shape.__str__() == "<unknown>":
continue
if i > self.hparams.max_num_outputs:
break
shape = output_properties.shape
for j, dim in enumerate(shape.dim):
if dim.size > 0:
k = i * self.hparams.max_output_size + j
if k >= self.hparams.max_num_outputs * self.hparams.max_output_size:
break
op_output_shapes[idx, k] = dim.size
# group_embedding
group_embedding = np.zeros(
[
self.num_groups, len(self.type_dict) +
self.hparams.max_num_outputs * self.hparams.max_output_size
],
dtype=np.float32)
for op_index, op in enumerate(self.important_ops):
group_index = grouping_actions[child_id][
self.name_to_topo_order_index[op.name]]
type_name = str(op.op)
type_index = self.type_dict[type_name]
group_embedding[group_index, type_index] += 1
group_embedding[group_index, :self.hparams.max_num_outputs * self.hparams.
max_output_size] += (
op_output_shapes[op_index])
grouping_adjacencies = np.concatenate(
[dag_matrix, np.transpose(dag_matrix)], axis=1)
group_embedding = np.concatenate(
[grouping_adjacencies, group_embedding], axis=1)
group_normalizer = np.amax(group_embedding, axis=1, keepdims=True)
group_embedding /= (group_normalizer + 1.0)
if verbose:
print("Finished Processing Input Graph")
return group_embedding
def get_placements(self, *args, **kwargs):
num_children = self.hparams.num_children
with variable_scope.variable_scope("controller_{}".format(self.ctrl_id)):
actions_cache = variable_scope.get_local_variable(
"actions_cache",
initializer=init_ops.zeros_initializer,
dtype=dtypes.int32,
shape=[num_children, self.num_groups],
trainable=False)
x = self.seq2seq_input_layer
last_c, last_h, attn_mem = self.encode(x)
actions, log_probs = {}, {}
actions["sample"], log_probs["sample"] = (
self.decode(
x, last_c, last_h, attn_mem, mode="sample"))
actions["target"], log_probs["target"] = (
self.decode(
x,
last_c,
last_h,
attn_mem,
mode="target",
y=actions_cache))
actions["greedy"], log_probs["greedy"] = (
self.decode(
x, last_c, last_h, attn_mem, mode="greedy"))
actions["sample"] = control_flow_ops.cond(
self.global_step < self.hparams.stop_sampling,
lambda: state_ops.assign(actions_cache, actions["sample"]),
lambda: state_ops.assign(actions_cache, actions["target"]))
self.actions_cache = actions_cache
return actions, log_probs
def encode(self, x):
"""Encoder using LSTM.
Args:
x: tensor of size [num_children, num_groups, embedding_size]
Returns:
last_c, last_h: tensors of size [num_children, hidden_size], the final
LSTM states
attn_mem: tensor of size [num_children, num_groups, hidden_size], the
attention
memory, i.e. concatenation of all hidden states, linearly transformed by
an attention matrix attn_w_1
"""
if self.hparams.bi_lstm:
with variable_scope.variable_scope(self.hparams.name, reuse=True):
w_lstm_forward = variable_scope.get_variable("encoder_lstm_forward")
w_lstm_backward = variable_scope.get_variable("encoder_lstm_backward")
forget_bias = variable_scope.get_variable("encoder_forget_bias")
attn_w_1 = variable_scope.get_variable("attn_w_1")
else:
with variable_scope.variable_scope(self.hparams.name, reuse=True):
w_lstm = variable_scope.get_variable("encoder_lstm")
forget_bias = variable_scope.get_variable("encoder_forget_bias")
attn_w_1 = variable_scope.get_variable("attn_w_1")
embedding_size = array_ops.shape(x)[2]
signals = array_ops.split(x, self.num_groups, axis=1)
for i in range(len(signals)):
signals[i] = array_ops.reshape(
signals[i], [self.hparams.num_children, embedding_size])
if self.hparams.bi_lstm:
def body(i, prev_c_forward, prev_h_forward, prev_c_backward,
prev_h_backward):
"""while loop for LSTM."""
signal_forward = signals[i]
next_c_forward, next_h_forward = lstm(signal_forward, prev_c_forward,
prev_h_forward, w_lstm_forward,
forget_bias)
signal_backward = signals[self.num_groups - 1 - i]
next_c_backward, next_h_backward = lstm(
signal_backward, prev_c_backward, prev_h_backward, w_lstm_backward,
forget_bias)
next_h = array_ops.concat([next_h_forward, next_h_backward], axis=1)
all_h.append(next_h)
return (next_c_forward, next_h_forward, next_c_backward,
next_h_backward)
c_forward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
h_forward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
c_backward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
h_backward = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size / 2],
dtype=dtypes.float32)
all_h = []
for i in range(0, self.num_groups):
c_forward, h_forward, c_backward, h_backward = body(
i, c_forward, h_forward, c_backward, h_backward)
last_c = array_ops.concat([c_forward, c_backward], axis=1)
last_h = array_ops.concat([h_forward, h_backward], axis=1)
attn_mem = array_ops.stack(all_h)
else:
def body(i, prev_c, prev_h):
signal = signals[i]
next_c, next_h = lstm(signal, prev_c, prev_h, w_lstm, forget_bias)
all_h.append(next_h)
return next_c, next_h
c = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size],
dtype=dtypes.float32)
h = array_ops.zeros(
[self.hparams.num_children, self.hparams.hidden_size],
dtype=dtypes.float32)
all_h = []
for i in range(0, self.num_groups):
c, h = body(i, c, h)
last_c = c
last_h = h
attn_mem = array_ops.stack(all_h)
attn_mem = array_ops.transpose(attn_mem, [1, 0, 2])
attn_mem = array_ops.reshape(
attn_mem,
[self.hparams.num_children * self.num_groups, self.hparams.hidden_size])
attn_mem = math_ops.matmul(attn_mem, attn_w_1)
attn_mem = array_ops.reshape(
attn_mem,
[self.hparams.num_children, self.num_groups, self.hparams.hidden_size])
return last_c, last_h, attn_mem
def decode(self,
x,
last_c,
last_h,
attn_mem,
mode="target",
y=None):
"""Decoder using LSTM.
Args:
x: tensor of size [num_children, num_groups, embedding_size].
last_c: tensor of size [num_children, hidden_size], the final LSTM states
computed by self.encoder.
last_h: same as last_c.
attn_mem: tensor of size [num_children, num_groups, hidden_size].
mode: "target" or "sample".
y: tensor of size [num_children, num_groups], the device placements.
Returns:
      actions: tensor of size [num_children, num_groups], the placements of
        devices
      log_probs: tensor of size [num_children], the accumulated log-probability
        terms for the emitted placements
"""
with variable_scope.variable_scope(self.hparams.name, reuse=True):
w_lstm = variable_scope.get_variable("decoder_lstm")
forget_bias = variable_scope.get_variable("decoder_forget_bias")
device_embeddings = variable_scope.get_variable("device_embeddings")
device_softmax = variable_scope.get_variable("device_softmax")
device_go_embedding = variable_scope.get_variable("device_go_embedding")
attn_w_2 = variable_scope.get_variable("attn_w_2")
attn_v = variable_scope.get_variable("attn_v")
actions = tensor_array_ops.TensorArray(
dtypes.int32,
size=self.num_groups,
infer_shape=False,
clear_after_read=False)
# pylint: disable=unused-argument
def condition(i, *args):
return math_ops.less(i, self.num_groups)
# pylint: disable=missing-docstring
def body(i, prev_c, prev_h, actions, log_probs):
# pylint: disable=g-long-lambda
signal = control_flow_ops.cond(
math_ops.equal(i, 0),
lambda: array_ops.tile(device_go_embedding,
[self.hparams.num_children, 1]),
lambda: embedding_ops.embedding_lookup(device_embeddings,
actions.read(i - 1))
)
if self.hparams.keep_prob is not None:
signal = nn_ops.dropout(signal, self.hparams.keep_prob)
next_c, next_h = lstm(signal, prev_c, prev_h, w_lstm, forget_bias)
query = math_ops.matmul(next_h, attn_w_2)
query = array_ops.reshape(
query, [self.hparams.num_children, 1, self.hparams.hidden_size])
query = math_ops.tanh(query + attn_mem)
query = array_ops.reshape(query, [
self.hparams.num_children * self.num_groups, self.hparams.hidden_size
])
query = math_ops.matmul(query, attn_v)
query = array_ops.reshape(query,
[self.hparams.num_children, self.num_groups])
query = nn_ops.softmax(query)
query = array_ops.reshape(query,
[self.hparams.num_children, self.num_groups, 1])
query = math_ops.reduce_sum(attn_mem * query, axis=1)
query = array_ops.concat([next_h, query], axis=1)
logits = math_ops.matmul(query, device_softmax)
logits /= self.hparams.temperature
if self.hparams.tanh_constant > 0:
logits = math_ops.tanh(logits) * self.hparams.tanh_constant
if self.hparams.logits_std_noise > 0:
num_in_logits = math_ops.cast(
array_ops.size(logits), dtype=dtypes.float32)
avg_norm = math_ops.divide(
linalg_ops.norm(logits), math_ops.sqrt(num_in_logits))
logits_noise = random_ops.random_normal(
array_ops.shape(logits),
stddev=self.hparams.logits_std_noise * avg_norm)
logits = control_flow_ops.cond(
self.global_step > self.hparams.stop_noise_step, lambda: logits,
lambda: logits + logits_noise)
if mode == "sample":
next_y = random_ops.multinomial(logits, 1, seed=self.hparams.seed)
elif mode == "greedy":
next_y = math_ops.argmax(logits, 1)
elif mode == "target":
next_y = array_ops.slice(y, [0, i], [-1, 1])
else:
raise NotImplementedError
next_y = math_ops.to_int32(next_y)
next_y = array_ops.reshape(next_y, [self.hparams.num_children])
actions = actions.write(i, next_y)
log_probs += nn_ops.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=next_y)
return i + 1, next_c, next_h, actions, log_probs
loop_vars = [
constant_op.constant(0, dtype=dtypes.int32), last_c, last_h, actions,
array_ops.zeros([self.hparams.num_children], dtype=dtypes.float32)
]
loop_outputs = control_flow_ops.while_loop(condition, body, loop_vars)
last_c = loop_outputs[-4]
last_h = loop_outputs[-3]
actions = loop_outputs[-2].stack()
actions = array_ops.transpose(actions, [1, 0])
log_probs = loop_outputs[-1]
return actions, log_probs
def eval_placement(self,
sess,
child_id=0,
verbose=False):
grouping_actions, actions = sess.run([
self.grouping_actions_cache,
self.actions_cache
])
grouping_actions = grouping_actions[child_id]
actions = actions[child_id]
if verbose:
global_step = sess.run(self.global_step)
if global_step % 100 == 0:
log_string = "op group assignments: "
for a in grouping_actions:
log_string += "{} ".format(a)
print(log_string[:-1])
log_string = "group device assignments: "
for a in actions:
log_string += "{} ".format(a)
print(log_string[:-1])
for op in self.important_ops:
topo_order_index = self.name_to_topo_order_index[op.name]
group_index = grouping_actions[topo_order_index]
op.device = self.devices[actions[group_index]].name
try:
_, run_time, _ = self.cluster.MeasureCosts(self.item)
except errors.ResourceExhaustedError:
run_time = self.hparams.failing_signal
return run_time
def update_reward(self,
sess,
run_time,
child_id=0,
verbose=False):
reward = self.compute_reward(run_time)
controller_ops = self.ops["controller"]
_, best_reward = sess.run(
[
controller_ops["reward"]["update"][child_id],
controller_ops["best_reward"]["update"][child_id]
],
feed_dict={
controller_ops["reward"]["ph"][child_id]: reward,
})
if verbose:
print(("run_time={:<.5f} reward={:<.5f} "
"best_reward={:<.5f}").format(run_time, reward, best_reward))
# Reward is a double, best_reward a float: allow for some slack in the
# comparison.
updated = abs(best_reward - reward) < 1e-6
return updated
def generate_grouping(self, sess):
controller_ops = self.ops["controller"]
grouping_actions = sess.run(controller_ops["grouping_y_preds"]["sample"])
return grouping_actions
def generate_placement(self, grouping, sess):
controller_ops = self.ops["controller"]
feed_seq2seq_input_dict = {}
feed_seq2seq_input_dict[self.seq2seq_input_layer] = grouping
sess.run(
controller_ops["y_preds"]["sample"], feed_dict=feed_seq2seq_input_dict)
def process_reward(self, sess):
controller_ops = self.ops["controller"]
run_ops = [
controller_ops["loss"], controller_ops["lr"],
controller_ops["grad_norm"], controller_ops["grad_norms"],
controller_ops["train_op"]
]
sess.run(run_ops)
sess.run(controller_ops["baseline_update"])
def _get_train_ops(self,
loss,
tf_variables,
global_step,
grad_bound=1.25,
lr_init=1e-3,
lr_dec=0.9,
start_decay_step=10000,
decay_steps=100,
optimizer_type="adam"):
"""Loss optimizer.
Args:
loss: scalar tf tensor
tf_variables: list of training variables, typically
tf.trainable_variables()
global_step: global_step
grad_bound: max gradient norm
lr_init: initial learning rate
      lr_dec: learning rate decay coefficient
start_decay_step: start decaying learning rate after this many steps
decay_steps: apply decay rate factor at this step intervals
optimizer_type: optimizer type should be either adam or sgd
Returns:
train_op: training op
learning_rate: scalar learning rate tensor
grad_norm: l2 norm of the gradient vector
all_grad_norms: l2 norm of each component
"""
lr_gstep = global_step - start_decay_step
def f1():
return constant_op.constant(lr_init)
def f2():
return learning_rate_decay.exponential_decay(lr_init, lr_gstep,
decay_steps, lr_dec, True)
learning_rate = control_flow_ops.cond(
math_ops.less(global_step, start_decay_step),
f1,
f2,
name="learning_rate")
if optimizer_type == "adam":
opt = adam.AdamOptimizer(learning_rate)
elif optimizer_type == "sgd":
opt = gradient_descent.GradientDescentOptimizer(learning_rate)
grads_and_vars = opt.compute_gradients(loss, tf_variables)
grad_norm = clip_ops.global_norm([g for g, v in grads_and_vars])
all_grad_norms = {}
clipped_grads = []
clipped_rate = math_ops.maximum(grad_norm / grad_bound, 1.0)
for g, v in grads_and_vars:
if g is not None:
if isinstance(g, tf_ops.IndexedSlices):
clipped = g.values / clipped_rate
norm_square = math_ops.reduce_sum(clipped * clipped)
clipped = tf_ops.IndexedSlices(clipped, g.indices)
else:
clipped = g / clipped_rate
norm_square = math_ops.reduce_sum(clipped * clipped)
all_grad_norms[v.name] = math_ops.sqrt(norm_square)
clipped_grads.append((clipped, v))
train_op = opt.apply_gradients(clipped_grads, global_step)
return train_op, learning_rate, grad_norm, all_grad_norms
def lstm(x, prev_c, prev_h, w_lstm, forget_bias):
"""LSTM cell.
Args:
x: tensors of size [num_children, hidden_size].
prev_c: tensors of size [num_children, hidden_size].
prev_h: same as prev_c.
    w_lstm: LSTM weight matrix of size [2 * hidden_size, 4 * hidden_size].
    forget_bias: scalar bias added to the forget gate before the sigmoid.
Returns:
    next_c: next cell state, tensor of size [num_children, hidden_size].
    next_h: next hidden state, same size as next_c.
"""
ifog = math_ops.matmul(array_ops.concat([x, prev_h], axis=1), w_lstm)
i, f, o, g = array_ops.split(ifog, 4, axis=1)
i = math_ops.sigmoid(i)
f = math_ops.sigmoid(f + forget_bias)
o = math_ops.sigmoid(o)
g = math_ops.tanh(g)
next_c = i * g + f * prev_c
next_h = o * math_ops.tanh(next_c)
return next_c, next_h
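# A minimal shape sketch for the lstm() helper above, kept as comments so the
# module's import-time behavior is unchanged; the sizes are illustrative
# assumptions rather than values used elsewhere in this file. Because ifog is
# split four ways after concat([x, prev_h]) @ w_lstm, w_lstm must have shape
# [2 * hidden_size, 4 * hidden_size].
#
#   x = array_ops.zeros([4, 8])         # [num_children, hidden_size]
#   prev_c = array_ops.zeros([4, 8])
#   prev_h = array_ops.zeros([4, 8])
#   w_lstm = array_ops.zeros([16, 32])  # [2 * hidden_size, 4 * hidden_size]
#   next_c, next_h = lstm(x, prev_c, prev_h, w_lstm, forget_bias=1.0)
#   # next_c and next_h both have shape [4, 8]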
| 38.996422
| 80
| 0.655649
|
52eb07b58fd3e205f2210e941724c8cea088f748
| 9,147
|
py
|
Python
|
teknologr/members/migrations/0001_initial.py
|
Pingviinituutti/teknologr.io
|
ac172f773819804c3124256eeda62d3d259baaec
|
[
"MIT"
] | null | null | null |
teknologr/members/migrations/0001_initial.py
|
Pingviinituutti/teknologr.io
|
ac172f773819804c3124256eeda62d3d259baaec
|
[
"MIT"
] | null | null | null |
teknologr/members/migrations/0001_initial.py
|
Pingviinituutti/teknologr.io
|
ac172f773819804c3124256eeda62d3d259baaec
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2017-03-06 23:16
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Decoration',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=64, unique=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='DecorationOwnership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('acquired', models.DateField()),
('decoration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='members.Decoration')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Functionary',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('begin_date', models.DateField()),
('end_date', models.DateField()),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='FunctionaryType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=64, unique=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('begin_date', models.DateField()),
('end_date', models.DateField()),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='GroupMembership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='members.Group')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='GroupType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=64, unique=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('given_names', models.CharField(default='UNKNOWN', max_length=64)),
('preferred_name', models.CharField(default='UNKNOWN', max_length=32)),
('surname', models.CharField(default='UNKNOWN', max_length=32)),
('maiden_name', models.CharField(blank=True, default='', max_length=32)),
('nickname', models.CharField(blank=True, default='', max_length=32)),
('birth_date', models.DateField(blank=True, null=True)),
('student_id', models.CharField(blank=True, default='', max_length=10)),
('gender', models.CharField(choices=[('UN', 'Okänd'), ('M', 'Man'), ('F', 'Kvinna')], default='UN', max_length=2)),
('nationality', django_countries.fields.CountryField(blank=True, default='', max_length=2)),
('enrolment_year', models.IntegerField(blank=True, null=True)),
('graduated', models.BooleanField(default=False)),
('graduated_year', models.IntegerField(blank=True, null=True)),
('degree_programme', models.CharField(choices=[('UN', 'Okänd'), ('ARTS - Högskolan för konst, design och arkitektur', (('ARK', 'Arkitektur'), ('BP', 'Bildkonstpedagogik'), ('DS', 'Design'), ('DK', 'Dokumentärfilm'), ('FM', 'Film- och tv-manuskript'), ('FP', 'Film- och tv-produktion'), ('FS', 'Film- och tv-scenografi'), ('FK', 'Filmklippning'), ('FL', 'Filmljudinspelning och -ljudplanering'), ('FI', 'Filmning'), ('FR', 'Filmregi'), ('GD', 'Grafisk design'), ('IA', 'Inredningsarkitektur'), ('KD', 'Kostymdesign'), ('LA', 'Landskapsarkitektur'), ('MD', 'Mode'), ('TS', 'Teaterscenografi'))), ('BIZ - Handelshögskolan', (('KT', 'Kauppatieteet'),)), ('CHEM - Högskolan för kemiteknik', (('KB', 'Kemi-, bio- och materialteknik'),)), ('ELEC - Högskolan för elektroteknik', (('AI', 'Automations- och informationsteknologi'), ('BI', 'Bioinformationsteknologi'), ('EL', 'Elektronik och elektroteknik'))), ('ENG - Högskolan för ingenjörsvetenskaper', (('BM', 'Den byggda miljön'), ('EM', 'Energi- och miljöteknik'), ('MB', 'Maskin- och byggnadsteknik'))), ('SCI - Högskolan för teknikvetenskaper', (('DT', 'Datateknik'), ('IN', 'Informationsnätverk'), ('PE', 'Produktionsekonomi'), ('TF', 'Teknisk fysik och matematik')))], default='UN', max_length=256)),
('stalm', models.BooleanField(default=False)),
('dead', models.BooleanField(default=False)),
('mobile_phone', models.CharField(blank=True, default='', max_length=20)),
('phone', models.CharField(blank=True, default='', max_length=20)),
('street_address', models.CharField(blank=True, default='', max_length=64)),
('postal_code', models.CharField(blank=True, default='', max_length=64)),
('city', models.CharField(blank=True, default='', max_length=64)),
('country', django_countries.fields.CountryField(blank=True, default='', max_length=2)),
('url', models.CharField(blank=True, default='', max_length=64)),
('email', models.CharField(blank=True, default='', max_length=64)),
('subscribed_to_modulen', models.BooleanField(default=False)),
('allow_publish_info', models.BooleanField(default=True)),
('username', models.CharField(blank=True, default='', max_length=32)),
('crm_id', models.CharField(blank=True, default='', max_length=32)),
('comment', models.TextField(blank=True, null=True)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='groupmembership',
name='member',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='members.Member'),
),
migrations.AddField(
model_name='group',
name='grouptype',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='members.GroupType'),
),
migrations.AddField(
model_name='functionary',
name='functionarytype',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='members.FunctionaryType'),
),
migrations.AddField(
model_name='functionary',
name='member',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='members.Member'),
),
migrations.AddField(
model_name='decorationownership',
name='member',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='members.Member'),
),
]
| 53.805882
| 1,265
| 0.568711
|
feaba223d7309652460f25d955da6dda1552aca7
| 1,403
|
py
|
Python
|
sols/917.py
|
Paul11100/LeetCode
|
9896c579dff1812c0c76964db8d60603ee715e35
|
[
"MIT"
] | null | null | null |
sols/917.py
|
Paul11100/LeetCode
|
9896c579dff1812c0c76964db8d60603ee715e35
|
[
"MIT"
] | null | null | null |
sols/917.py
|
Paul11100/LeetCode
|
9896c579dff1812c0c76964db8d60603ee715e35
|
[
"MIT"
] | null | null | null |
class Solution:
# # One Pass, Two Pointers, if-while-while loop (Accepted), O(n) time and space
# def reverseOnlyLetters(self, s: str) -> str:
# n = len(s)
# l, r = 0, n-1
# s = list(s)
# while l < r:
# if s[l].isalpha() and s[r].isalpha():
# s[l], s[r] = s[r], s[l]
# l, r = l+1, r-1
# while l < n and not s[l].isalpha():
# l += 1
# while r >= 0 and not s[r].isalpha():
# r -= 1
# return "".join(s)
# # Two Pointers, One pass, while-while loop (Top Voted), O(n) time, O(n) space
# def reverseOnlyLetters(self, S: str) -> str:
# i, j = 0, len(S) - 1
# S = list(S)
# while i < j:
# while i < j and not S[i].isalpha(): i += 1
# while i < j and not S[j].isalpha(): j -= 1
# S[i], S[j] = S[j], S[i]
# i, j = i + 1, j - 1
# return "".join(S)
# Two Pointers, One pass, while-if loop (Top Voted), O(n) time, O(n) space
def reverseOnlyLetters(self, S: str) -> str:
S, i, j = list(S), 0, len(S) - 1
while i < j:
if not S[i].isalpha():
i += 1
elif not S[j].isalpha():
j -= 1
else:
S[i], S[j] = S[j], S[i]
i, j = i + 1, j - 1
return "".join(S)
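# A quick, self-contained check of the solution above; the input string is an
# illustrative assumption. Non-letter characters keep their positions while
# the letters are reversed around them.
if __name__ == "__main__":
    print(Solution().reverseOnlyLetters("ab-cd"))  # expected: dc-ba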
| 35.075
| 83
| 0.402708
|
9d1caad7dc9660309f1784ce4565c833e1c9304d
| 2,067
|
py
|
Python
|
bindings/pydrake/test/text_logging_test.py
|
RobotLocomotion/drake-python3.7
|
ae397a4c6985262d23e9675b9bf3927c08d027f5
|
[
"BSD-3-Clause"
] | 2
|
2021-02-25T02:01:02.000Z
|
2021-03-17T04:52:04.000Z
|
bindings/pydrake/test/text_logging_test.py
|
RobotLocomotion/drake-python3.7
|
ae397a4c6985262d23e9675b9bf3927c08d027f5
|
[
"BSD-3-Clause"
] | null | null | null |
bindings/pydrake/test/text_logging_test.py
|
RobotLocomotion/drake-python3.7
|
ae397a4c6985262d23e9675b9bf3927c08d027f5
|
[
"BSD-3-Clause"
] | 1
|
2021-06-13T12:05:39.000Z
|
2021-06-13T12:05:39.000Z
|
import re
import subprocess
import unittest
class TestTextLogging(unittest.TestCase):
def expected_message(self, spdlog_level):
# Expected message format:
# [<date> <time>] [console] [<level>] <message>
# See bindings/pydrake/test/text_logging_test_py.cc
expected_messages = {"debug": "Test Debug message",
"info": "Test Info message",
"warn": "Test Warn message",
"error": "Test Error message",
"critical": "Test Critical message"}
level_strings = {"debug": "debug",
"info": "info",
"warn": "warning",
"error": "error",
"critical": "critical"}
message = expected_messages[spdlog_level]
level = level_strings[spdlog_level]
return fr"\[[0-9,\-,\s,:,\.]*\] \[console\] \[{level}\] {message}"
def do_test(self, spdlog_level, expected_spdlog_levels):
output = subprocess.check_output(
["bindings/pydrake/text_logging_example", spdlog_level],
stderr=subprocess.STDOUT).decode("utf8")
expected_output = ""
for level in expected_spdlog_levels:
expected_output += self.expected_message(level) + "\n"
if not expected_output:
self.assertEqual(output, expected_output)
else:
self.assertRegex(output, expected_output)
def test_debug_logging(self):
self.do_test("debug",
["debug", "info", "warn", "error", "critical"])
def test_info_logging(self):
self.do_test("info", ["info", "warn", "error", "critical"])
def test_warning_logging(self):
self.do_test("warn", ["warn", "error", "critical"])
def test_error_logging(self):
self.do_test("err", ["error", "critical"])
def test_critical_logging(self):
self.do_test("critical", ["critical"])
def test_no_logging(self):
self.do_test("off", [])
| 36.263158
| 74
| 0.559748
|
a68ea5fcce58fe75c70670e12a8da923c023083f
| 2,032
|
py
|
Python
|
setup.py
|
tk0miya/nwdiag
|
40849331267f2bb0f2f4f0249a33a53f760d3267
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
tk0miya/nwdiag
|
40849331267f2bb0f2f4f0249a33a53f760d3267
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
tk0miya/nwdiag
|
40849331267f2bb0f2f4f0249a33a53f760d3267
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages
sys.path.insert(0, 'src')
import nwdiag
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Documentation",
"Topic :: Text Processing :: Markup",
]
requires = ['blockdiag>=1.5.0']
test_requires = ['nose',
'pep8>=1.3',
'flake8',
'flake8-coding',
'flake8-copyright',
'reportlab',
'docutils']
if (3, 2) < sys.version_info < (3, 3):
requires.append('webcolors < 1.5') # webcolors-1.5 does not support py32
setup(
name='nwdiag',
version=nwdiag.__version__,
description='nwdiag generates network-diagram image from text',
long_description=open("README.rst").read(),
classifiers=classifiers,
keywords=['diagram', 'generator'],
author='Takeshi Komiya',
author_email='i.tkomiya at gmail.com',
url='http://blockdiag.com/',
download_url='http://pypi.python.org/pypi/nwdiag',
license='Apache License 2.0',
py_modules=[
'nwdiag_sphinxhelper',
'rackdiag_sphinxhelper',
'packetdiag_sphinxhelper',
],
packages=find_packages('src'),
package_dir={'': 'src'},
package_data={'': ['buildout.cfg']},
include_package_data=True,
install_requires=requires,
extras_require=dict(
testing=test_requires,
pdf=[
'reportlab',
],
rst=[
'docutils',
],
),
test_suite='nose.collector',
tests_require=test_requires,
entry_points="""
[console_scripts]
nwdiag = nwdiag.command:main
rackdiag = rackdiag.command:main
packetdiag = packetdiag.command:main
[blockdiag_noderenderer]
_packet_node = packetdiag.noderenderers
""",
)
| 27.835616
| 77
| 0.605807
|
99fc61fa9703b679fe6a0895e955fe9acb749b41
| 2,376
|
py
|
Python
|
dateparser/data/date_translation_data/ckb.py
|
bazingarj/dateparser
|
48c4563fb7f6ce685fbd6d27e9e83257521d2203
|
[
"BSD-3-Clause"
] | 8
|
2019-11-15T21:00:15.000Z
|
2021-12-21T22:09:42.000Z
|
dateparser/data/date_translation_data/ckb.py
|
bazingarj/dateparser
|
48c4563fb7f6ce685fbd6d27e9e83257521d2203
|
[
"BSD-3-Clause"
] | 9
|
2020-06-05T21:28:57.000Z
|
2022-02-12T12:30:39.000Z
|
dateparser/data/date_translation_data/ckb.py
|
bazingarj/dateparser
|
48c4563fb7f6ce685fbd6d27e9e83257521d2203
|
[
"BSD-3-Clause"
] | 21
|
2019-03-11T04:25:23.000Z
|
2022-02-03T08:54:33.000Z
|
# -*- coding: utf-8 -*-
info = {
"name": "ckb",
"date_order": "YMD",
"january": [
"کانوونی دووەم"
],
"february": [
"شوبات"
],
"march": [
"ئازار"
],
"april": [
"نیسان"
],
"may": [
"ئایار"
],
"june": [
"حوزەیران"
],
"july": [
"تەمووز"
],
"august": [
"ئاب"
],
"september": [
"ئەیلوول"
],
"october": [
"تشرینی یەکەم"
],
"november": [
"تشرینی دووەم"
],
"december": [
"کانونی یەکەم"
],
"monday": [
"دووشەممە"
],
"tuesday": [
"سێشەممە"
],
"wednesday": [
"چوارشەممە"
],
"thursday": [
"پێنجشەممە"
],
"friday": [
"ھەینی"
],
"saturday": [
"شەممە"
],
"sunday": [
"یەکشەممە"
],
"am": [
"بن"
],
"pm": [
"دن"
],
"year": [
"year"
],
"month": [
"month"
],
"week": [
"week"
],
"day": [
"day"
],
"hour": [
"hour"
],
"minute": [
"minute"
],
"second": [
"second"
],
"relative-type": {
"1 year ago": [
"last year"
],
"0 year ago": [
"this year"
],
"in 1 year": [
"next year"
],
"1 month ago": [
"last month"
],
"0 month ago": [
"this month"
],
"in 1 month": [
"next month"
],
"1 week ago": [
"last week"
],
"0 week ago": [
"this week"
],
"in 1 week": [
"next week"
],
"1 day ago": [
"yesterday"
],
"0 day ago": [
"today"
],
"in 1 day": [
"tomorrow"
],
"0 hour ago": [
"this hour"
],
"0 minute ago": [
"this minute"
],
"0 second ago": [
"now"
]
},
"locale_specific": {
"ckb-IR": {
"name": "ckb-IR"
}
},
"skip": [
" ",
".",
",",
";",
"-",
"/",
"'",
"|",
"@",
"[",
"]",
","
]
}
| 15.329032
| 28
| 0.274832
|
5350801c12cc234a33e61c652b12ea9f6faf1873
| 410
|
py
|
Python
|
responder_login/__about__.py
|
theoldmoon0602/responder-login
|
f147e3c6d9db2876a908c459cb7aba905ee1f414
|
[
"MIT"
] | null | null | null |
responder_login/__about__.py
|
theoldmoon0602/responder-login
|
f147e3c6d9db2876a908c459cb7aba905ee1f414
|
[
"MIT"
] | null | null | null |
responder_login/__about__.py
|
theoldmoon0602/responder-login
|
f147e3c6d9db2876a908c459cb7aba905ee1f414
|
[
"MIT"
] | null | null | null |
__title__ = "Responder-Login"
__description__ = "User session management for Responder"
__url__ = "https://github.com/delta114514/responder-login"
__version_info__ = ("0", "0", "3")
__version__ = ".".join(__version_info__)
__author__ = "Yamato Nagata"
__author_email__ = "chickenwingswillfly@gmail.com"
__maintainer__ = "Yamato Nagata"
__license__ = "MIT"
__copyright__ = "(c) 2019 by Yamato Nagata"
| 37.272727
| 59
| 0.741463
|
4ca88345b7322b183a473553c4670587503e8f3f
| 271
|
py
|
Python
|
src/riotwatcher/exceptions.py
|
acgandhi/Riot-Watcher
|
f8a7ea144a00fc62f9ffaee5bad3158f41251589
|
[
"MIT"
] | 489
|
2015-01-04T22:49:51.000Z
|
2022-03-28T03:15:54.000Z
|
src/riotwatcher/exceptions.py
|
acgandhi/Riot-Watcher
|
f8a7ea144a00fc62f9ffaee5bad3158f41251589
|
[
"MIT"
] | 162
|
2015-02-09T22:10:40.000Z
|
2022-02-22T13:48:50.000Z
|
src/riotwatcher/exceptions.py
|
acgandhi/Riot-Watcher
|
f8a7ea144a00fc62f9ffaee5bad3158f41251589
|
[
"MIT"
] | 221
|
2015-01-07T18:01:57.000Z
|
2022-03-26T21:18:48.000Z
|
import requests
from .Handlers import (
ApiError as _ApiError,
IllegalArgumentError as _IllegalArgumentError,
)
ApiError = _ApiError # should silence code analysis warning
IllegalArgumentError = _IllegalArgumentError
TimeoutError = requests.exceptions.Timeout
| 24.636364
| 60
| 0.815498
|
68177b945eb0fb8af4468fdb89b94a4967385dfe
| 951
|
py
|
Python
|
tools/vscode-extension/server/tests/test_events.py
|
orlandoojr1/wave
|
e86d0c87c6c67e510fb4e1fa571982ca0a09f33c
|
[
"Apache-2.0"
] | 1
|
2022-03-02T21:54:36.000Z
|
2022-03-02T21:54:36.000Z
|
tools/vscode-extension/server/tests/test_events.py
|
orlandoojr1/wave
|
e86d0c87c6c67e510fb4e1fa571982ca0a09f33c
|
[
"Apache-2.0"
] | null | null | null |
tools/vscode-extension/server/tests/test_events.py
|
orlandoojr1/wave
|
e86d0c87c6c67e510fb4e1fa571982ca0a09f33c
|
[
"Apache-2.0"
] | null | null | null |
from server.tests.utils import BaseTestCase
class TestEventCompletions(BaseTestCase):
def test_events(self):
self.assert_interaction('q.events.')
def test_events_bracket(self):
self.assert_interaction("q.events['']")
def test_autocomplete_stop(self):
self.assertEqual(len(self.get_completions('q.events.events.')), 0)
def test_autocomplete_stop_bracket(self):
self.assertEqual(len(self.get_completions('q.events[""][""]')), 0)
def test_autocomplete_if_statement(self):
self.assertEqual(len(self.get_completions('if q.events.')), 3)
self.assertEqual(len(self.get_completions('if q.events[""]')), 3)
self.assertEqual(len(self.get_completions("if q.events['']")), 3)
def test_in_function_call(self):
self.assertEqual(len(self.get_completions('print(q.events.)', typing_offset=1)), 3)
self.assertEqual(len(self.get_completions('print(q.events.')), 3)
| 36.576923
| 91
| 0.692955
|
9291b263a2501a9f23ceb5b70a85428766c1a11c
| 216
|
py
|
Python
|
setup.py
|
lidia-nna/stock-tracker-spa
|
d9b0d640836aefefe4d1e3a073bc1c41ab4ea552
|
[
"MIT"
] | null | null | null |
setup.py
|
lidia-nna/stock-tracker-spa
|
d9b0d640836aefefe4d1e3a073bc1c41ab4ea552
|
[
"MIT"
] | 1
|
2021-04-01T15:59:57.000Z
|
2021-04-01T15:59:57.000Z
|
setup.py
|
lidia-nna/stock-tracker-spa
|
d9b0d640836aefefe4d1e3a073bc1c41ab4ea552
|
[
"MIT"
] | null | null | null |
from setuptools import find_packages, setup
setup(
name='app',
version='0.1',
packages=find_packages(),
include_package_data=False,
zip_safe=False,
install_requires=[
'flask',
],
)
| 19.636364
| 43
| 0.643519
|
920bcab8a17595321fdbc401371001724e38f3c4
| 1,068
|
py
|
Python
|
function1.py
|
debdutgoswami/IoT-cloud-function
|
333bfb49e7e13178edcf34901e7a35df7b71d4b7
|
[
"MIT"
] | null | null | null |
function1.py
|
debdutgoswami/IoT-cloud-function
|
333bfb49e7e13178edcf34901e7a35df7b71d4b7
|
[
"MIT"
] | null | null | null |
function1.py
|
debdutgoswami/IoT-cloud-function
|
333bfb49e7e13178edcf34901e7a35df7b71d4b7
|
[
"MIT"
] | null | null | null |
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from datetime import datetime
def update(request):
    # getting the variables ready
data = {
#your client_json contents as dictionary
}
request_json = request.get_json()
request_args = request.args
    temp, humidity = "", ""
if request_json and 'temp' in request_json:
temp = request_json['temp']
elif request_args and 'temp' in request_args:
temp = request_args['temp']
# use creds to create a client to interact with the Google Drive API
scope = ['https://spreadsheets.google.com/feeds','https://www.googleapis.com/auth/drive']
creds = ServiceAccountCredentials.from_json_keyfile_dict(data,scope)
client = gspread.authorize(creds)
# Find a workbook by name and open the first sheet
# Make sure you use the right name here.
sheet = client.open("Temperature").sheet1
row = [datetime.now().strftime("%d/%m/%Y %H:%M:%S"), temp]
index = 2
sheet.insert_row(row, index)
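# A hypothetical invocation sketch: the function above expects a "temp" field
# in either the JSON body or the query string, e.g. a POST body of
# {"temp": "21.5"}; the spreadsheet named "Temperature" must already exist and
# be shared with the service account whose credentials fill the data dict.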
| 31.411765
| 94
| 0.676966
|
4556a1727ce3acfb9a1648d53247d6808f305896
| 25,183
|
py
|
Python
|
examples/frontiers/sagehen_90m.py
|
usgs-pygsflow/pygsflow
|
66a6dc2848d6ea41c6e776a4226960d07dcbc85b
|
[
"Unlicense"
] | 1
|
2019-05-24T17:16:32.000Z
|
2019-05-24T17:16:32.000Z
|
examples/frontiers/sagehen_90m.py
|
usgs-pygsflow/pygsflow
|
66a6dc2848d6ea41c6e776a4226960d07dcbc85b
|
[
"Unlicense"
] | 1
|
2019-05-21T23:13:10.000Z
|
2019-05-21T23:13:10.000Z
|
examples/frontiers/sagehen_90m.py
|
usgs-pygsflow/pygsflow
|
66a6dc2848d6ea41c6e776a4226960d07dcbc85b
|
[
"Unlicense"
] | null | null | null |
import os
import utm
import platform
import flopy
import numpy as np
import shapefile
import matplotlib.pyplot as plt
from flopy.utils import Raster
from flopy.plot import styles
from gsflow import GsflowModel, PrmsModel, PrmsData
from gsflow.builder import (
GenerateFishnet,
ModflowBuilder,
ControlFileBuilder,
PrmsBuilder,
FlowAccumulation
)
import gsflow.builder.builder_utils as bu
import pandas as pd
pd.options.mode.chained_assignment = None
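# The helper below follows the standard Nash-Sutcliffe form
#   NSE = 1 - sum((qsim - qobs)**2) / sum((qobs - mean(qobs))**2),
# optionally on log-transformed flows; infinities from log(0) are masked to
# NaN so the nan-aware sums ignore them.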
def nash_sutcliffe_efficiency(qsim, qobs, flg):
if flg:
qsim = np.log(qsim)
qobs = np.log(qobs)
qsim[np.isinf(qsim)] = np.nan
qobs[np.isinf(qobs)] = np.nan
numerator = np.nansum((qsim - qobs) ** 2)
denominator = np.nansum((qobs - np.nanmean(qobs)) ** 2)
nse = 1 - (numerator / denominator)
return nse
def build_lut(f, dtype=int):
d = {}
with open(f) as foo:
for line in foo:
temp = line.strip().split("#")[0]
if not temp:
continue
else:
l = temp.split(":")
d[dtype(l[0])] = float(l[1])
return d
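# build_lut expects remap files with one "key:value" pair per line and "#"
# starting a comment; an illustrative (assumed) snippet:
#
#   11:0.5  # raster class 11 maps to parameter value 0.5
#   12:0.8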
if __name__ == "__main__":
sample_grid = True
# set file names here
ws = os.path.abspath(os.path.dirname(__file__))
iws = os.path.join(ws, "..", "data", "geospatial")
ows = os.path.join(ws, "temp")
if not os.path.exists(ows):
os.mkdir(ows)
dem_file = os.path.join(iws, 'dem.img')
pour_point_file = os.path.join(iws, "model_points.shp")
resampled_dem = os.path.join(ows, "sagehen_90m_med.txt")
stream_threshold = 810000 # m3 of drainage area
cellsize = 90
# generate a "Fishnet"
modelgrid = GenerateFishnet(dem_file, xcellsize=cellsize, ycellsize=cellsize)
    # resample DEM to the model grid using median elevation
if sample_grid:
raster = Raster.load(dem_file)
dem = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="median",
multithread=True,
thread_pool=12
)
np.savetxt(resampled_dem, dem, delimiter=" ")
else:
dem = np.genfromtxt(resampled_dem)
hru_type = np.ones((modelgrid.nrow, modelgrid.ncol), dtype=int)
fa = FlowAccumulation(dem,
modelgrid.xcellcenters,
modelgrid.ycellcenters,
hru_type=hru_type, verbose=True)
flow_dir = fa.flow_directions(dijkstra=True)
qx, qy = fa.get_vectors
flow_acc = fa.flow_accumulation()
# read in pour point from shapefile and set watershed boundary
with shapefile.Reader(pour_point_file) as r:
shape = r.shape(0)
pour_point = shape.points
pour_point[0][1] -= 20
pour_point[0][0] -= 20
watershed = fa.define_watershed(pour_point, modelgrid, fmt='xy')
threshold = stream_threshold / (cellsize ** 2)
strm_obj = fa.make_streams(flow_dir, flow_acc, threshold)
cascades = fa.get_cascades(strm_obj)
i = np.floor(3646 / modelgrid.ncol)
j = 3646 % modelgrid.ncol
mfbuild = ModflowBuilder(modelgrid, dem, "sagehen_90m")
botm = dem - 100
botm.shape = (1, modelgrid.nrow, modelgrid.ncol)
ml = mfbuild.build_all(strm_obj.reach_data,
strm_obj.segment_data,
strm_obj.irunbnd,
finf=np.ones(dem.shape),
botm=botm,
ibound=watershed.astype(int),
iuzfbnd=watershed.astype(int)
)
# update dis file to create a transient model
flopy.modflow.ModflowDis(
ml,
nlay=ml.dis.nlay,
nrow=ml.dis.nrow,
ncol=ml.dis.ncol,
nper=2,
delr=ml.dis.delr,
delc=ml.dis.delc,
laycbd=ml.dis.laycbd,
top=ml.dis.top,
botm=ml.dis.botm,
perlen=[1, 5356],
nstp=[1, 5356],
tsmult=[1, 1],
steady=[True, False],
itmuni=ml.dis.itmuni,
lenuni=ml.dis.lenuni
)
# update a few SFR parameters for GSFLOW!
ml.sfr.segment_data[0]["flow"] *= 0
ml.sfr.segment_data[0]["roughch"] = 0.04
ml.sfr.reach_data["strhc1"] = 0.1
# tune some of the other MODFLOW parameters
ml.upw.hk *= 1.75e-03 # (default is 10)
ml.upw.ss *= 1.0 # (default is 1e-06)
prms_outfile = os.path.join(ows, "sagehen.param")
prmsbuild = PrmsBuilder(strm_obj, cascades, modelgrid, fa.get_dem_data().ravel(),
hru_type=watershed, hru_subbasin=watershed)
param_obj = prmsbuild.build()
lat, lon = utm.to_latlon(modelgrid.xcellcenters.ravel(), modelgrid.ycellcenters.ravel(), 10, "N")
param_obj.set_values("hru_lat", lat)
param_obj.set_values("hru_lon", lon)
sample_rasters = True
nhru = modelgrid.nrow * modelgrid.ncol
# load in rasters and luts for parameterizing prms
veg_type_raster = os.path.join(iws, "us_140evt_utm.img")
veg_cov_raster = os.path.join(iws, "us_140evc_utm.img")
awc_raster = os.path.join(iws, "awc.img")
clay_raster = os.path.join(iws, "clay.img")
ksat_raster = os.path.join(iws, "ksat.img")
sand_raster = os.path.join(iws, "sand.img")
impervious_raster = os.path.join(iws, "nlcd2011_imp_utm.img")
prism = {"ppt_utm": [], "tmax_utm": [], "tmin_utm": []}
for folder in prism.keys():
for f in os.listdir(os.path.join(iws, "climate", folder)):
if os.path.isfile(os.path.join(iws, "climate", folder, f)) and f.endswith(".img"):
prism[folder].append(os.path.join(iws, "climate", folder, f))
resampled_veg_type = os.path.join(ows, "veg_type_nearest_90.txt")
resampled_veg_cov = os.path.join(ows, "veg_cov_nearest_90.txt")
resampled_awc = os.path.join(ows, "awc_median_90.txt")
resampled_clay = os.path.join(ows, "clay_median_90.txt")
resampled_ksat = os.path.join(ows, "ksat_median_90.txt")
resampled_sand = os.path.join(ows, "sand_median_90.txt")
resampled_impervious = os.path.join(ows, "impervious_median_90.txt")
resampled_ppt = os.path.join(ows, "ppt_bilinear_90.txt")
resampled_tmax = os.path.join(ows, 'tmax_bilinear_90.txt')
resampled_tmin = os.path.join(ows, 'tmin_bilinear_90.txt')
covtype_remap = os.path.join(iws, "..", 'remaps', "landfire", "covtype.rmp")
covden_sum_remap = os.path.join(iws, "..", 'remaps', "landfire", "covdensum.rmp")
covden_win_remap = os.path.join(iws, "..", 'remaps', "landfire", "covdenwin.rmp")
root_depth_remap = os.path.join(iws, "..", 'remaps', "landfire", 'rtdepth.rmp')
snow_intcp_remap = os.path.join(iws, "..", "remaps", "landfire", "snow_intcp.rmp")
srain_intcp_remap = os.path.join(iws, "..", "remaps", "landfire", "srain_intcp.rmp")
climate_dataframe = os.path.join(iws, 'climate', "sagehen_climate.csv")
climate_lapse_rates = os.path.join(iws, 'climate', 'sagehen_lapse_rates.csv')
if sample_rasters:
ibound = watershed.astype(int)
raster = Raster.load(veg_type_raster)
veg_type = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="nearest",
multithread=True,
thread_pool=12
)
veg_type[ibound == 0] = 0
veg_type = veg_type.astype(int)
np.savetxt(resampled_veg_type, veg_type, fmt="%d")
raster = Raster.load(veg_cov_raster)
# todo: this might need to be a zonal statistic (mode)
veg_cov = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="nearest",
multithread=True,
thread_pool=12
)
veg_cov[ibound == 0] = 0
veg_cov = veg_cov.astype(int)
np.savetxt(resampled_veg_cov, veg_cov, fmt="%d")
raster = Raster.load(awc_raster)
awc = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="median",
multithread=True,
thread_pool=12
)
awc[ibound == 0] = 0
awc[awc == raster.nodatavals[0]] = np.nanmedian(awc)
np.savetxt(resampled_awc, awc)
raster = Raster.load(ksat_raster)
ksat = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="median",
multithread=True,
thread_pool=12
)
ksat[ibound == 0] = 0
ksat[ksat == raster.nodatavals[0]] = np.nanmedian(ksat)
np.savetxt(resampled_ksat, ksat)
raster = Raster.load(sand_raster)
sand = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="median",
multithread=True,
thread_pool=12
)
sand[ibound == 0] = 0
sand[sand == raster.nodatavals[0]] = np.nanmedian(sand)
sand /= 100
np.savetxt(resampled_sand, sand)
raster = Raster.load(clay_raster)
clay = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="median",
multithread=True,
thread_pool=12
)
clay[ibound == 0] = 0
clay[clay == raster.nodatavals[0]] = np.nanmedian(clay)
clay /= 100
np.savetxt(resampled_clay, clay)
raster = Raster.load(impervious_raster)
impervious = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="median",
multithread=True,
thread_pool=12
)
impervious[ibound == 0] = 0
impervious /= 100
np.savetxt(resampled_impervious, impervious)
ppt = []
for rstr in prism["ppt_utm"]:
raster = Raster.load(rstr)
tppt = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="linear",
multithread=True,
thread_pool=12
)
ppt.append(tppt.ravel())
ppt = np.array(ppt)
np.savetxt(resampled_ppt, ppt)
tmin = []
for rstr in prism["tmin_utm"]:
raster = Raster.load(rstr)
ttmin = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="linear",
multithread=True,
thread_pool=12
)
tmin.append(ttmin.ravel())
tmin = np.array(tmin)
np.savetxt(resampled_tmin, tmin)
tmax = []
for rstr in prism["tmax_utm"]:
raster = Raster.load(rstr)
ttmax = raster.resample_to_grid(
modelgrid,
band=raster.bands[0],
method="linear",
multithread=True,
thread_pool=12
)
tmax.append(ttmax.ravel())
tmax = np.array(tmax)
np.savetxt(resampled_tmax, tmax)
else:
veg_type = np.genfromtxt(resampled_veg_type, dtype=int)
veg_cov = np.genfromtxt(resampled_veg_cov, dtype=int)
awc = np.genfromtxt(resampled_awc)
ksat = np.genfromtxt(resampled_ksat)
sand = np.genfromtxt(resampled_sand)
clay = np.genfromtxt(resampled_clay)
impervious = np.genfromtxt(resampled_impervious)
ppt = np.genfromtxt(resampled_ppt)
tmax = np.genfromtxt(resampled_tmax)
tmin = np.genfromtxt(resampled_tmin)
ppt.shape = (12, nhru)
tmax.shape = (12, nhru)
tmin.shape = (12, nhru)
covtype_lut = build_lut(covtype_remap)
covden_sum_lut = build_lut(covden_sum_remap)
covden_win_lut = build_lut(covden_win_remap)
root_depth_lut = build_lut(root_depth_remap)
snow_intcp_lut = build_lut(snow_intcp_remap)
srain_intcp_lut = build_lut(srain_intcp_remap)
# read in "climate dataframe"
cdf = pd.read_csv(climate_dataframe)
ldf = pd.read_csv(climate_lapse_rates)
    # build vegetative cover parameters
covtype = bu.covtype(veg_type, covtype_lut)
covden_sum = bu.covden_sum(veg_cov, covden_sum_lut)
covden_win = bu.covden_win(covtype.values, covden_win_lut)
rad_trncf = bu.rad_trncf(covden_win.values)
snow_intcp = bu.snow_intcp(veg_type, snow_intcp_lut)
srain_intcp = bu.srain_intcp(veg_type, srain_intcp_lut)
wrain_intcp = bu.wrain_intcp(veg_type, snow_intcp_lut)
# add veg to param_obj
param_obj.add_record_object(covtype, True)
param_obj.add_record_object(covden_sum, True)
param_obj.add_record_object(covden_win, True)
param_obj.add_record_object(rad_trncf, True)
param_obj.add_record_object(snow_intcp, True)
param_obj.add_record_object(srain_intcp, True)
param_obj.add_record_object(wrain_intcp, True)
# build soil parameters
root_depth = bu.root_depth(veg_type, root_depth_lut)
hru_aspect = bu.d8_to_hru_aspect(flow_dir)
hru_slope = bu.d8_to_hru_slope(
flow_dir,
dem,
modelgrid.xcellcenters,
modelgrid.ycellcenters
)
soil_type = bu.soil_type(clay, sand)
soil_moist_max = bu.soil_moist_max(awc, root_depth)
soil_moist_init = bu.soil_moist_init(soil_moist_max.values)
soil_rech_max = bu.soil_rech_max(awc, root_depth)
ssr2gw_rate = bu.ssr2gw_rate(ksat, sand, soil_moist_max.values)
ssr2gw_sq = bu.ssr2gw_exp(nhru)
soil_rech_init = bu.soil_rech_init(soil_rech_max.values)
slowcoef_lin = bu.slowcoef_lin(ksat, hru_aspect.values, cellsize, cellsize)
slowcoef_sq = bu.slowcoef_sq(
ksat,
hru_aspect.values,
sand,
soil_moist_max.values,
cellsize,
cellsize
)
# add soil parameters to prms object
param_obj.add_record_object(hru_slope, replace=True)
param_obj.add_record_object(hru_aspect, replace=True)
param_obj.add_record_object(soil_type, replace=True)
param_obj.add_record_object(soil_moist_max, replace=True)
param_obj.add_record_object(soil_moist_init, replace=True)
param_obj.add_record_object(soil_rech_max, replace=True)
param_obj.add_record_object(soil_rech_init, replace=True)
param_obj.add_record_object(ssr2gw_rate, replace=True)
param_obj.add_record_object(ssr2gw_sq, replace=True)
param_obj.add_record_object(slowcoef_lin, replace=True)
param_obj.add_record_object(slowcoef_sq, replace=True)
# imperviousness parameters
hru_percent_imperv = bu.hru_percent_imperv(impervious)
carea_max = bu.carea_max(impervious)
# add imperv to prms obj
param_obj.add_record_object(hru_percent_imperv, replace=True)
param_obj.add_record_object(carea_max, replace=True)
# climate parameters
param_obj.add_record(name="nobs", values=[1,])
outlet_sta = modelgrid.intersect(pour_point[0][0], pour_point[0][1])
outlet_sta = modelgrid.get_node([(0,) + outlet_sta])
cdf = bu.add_prms_date_columns_to_df(cdf, "date")
cdf.rename(
columns={
'precip': 'precip_0',
'tmin': 'tmin_0',
'tmax': 'tmax_0',
'runoff': 'runoff_0',
'date': 'Date'
},
inplace=True
)
# reorder dataframe to later build a prms Data object from it
cdfcols = [
"Year", "Month", "Day", "Hour", "Minute", "Second",
"tmax_0", "tmin_0", "precip_0", "runoff_0", "Date"
]
cdf = cdf[cdfcols]
# start climate parameter calculations
mean_ppt = bu.get_mean_monthly_from_df(cdf, 'precip_0')
cdf["tmax_0"] = bu.fahrenheit_to_celsius(cdf["tmax_0"].values)
cdf["tmin_0"] = bu.fahrenheit_to_celsius(cdf["tmin_0"].values)
mean_tmax = bu.get_mean_monthly_from_df(cdf, "tmax_0", temperature=True)
mean_tmin = bu.get_mean_monthly_from_df(cdf, "tmin_0", temperature=True)
rain_adj = bu.rain_adj(ppt, mean_ppt)
snow_adj = bu.snow_adj(ppt, mean_ppt)
tmin_lapse = bu.tmin_lapse(ldf.tmin_lapse.values * (5 / 9))
tmax_lapse = bu.tmax_lapse(ldf.tmax_lapse.values * (5 / 9))
tmax_adj = bu.tmax_adj(nhru)
tmin_adj = bu.tmin_adj(nhru)
jh_coef = bu.calculate_jensen_haise(dem, mean_tmin, mean_tmax)
# add climate parameters to param obj
param_obj.add_record_object(rain_adj, replace=True)
param_obj.add_record_object(snow_adj, replace=True)
param_obj.add_record_object(tmin_lapse, replace=True)
param_obj.add_record_object(tmax_lapse, replace=True)
param_obj.add_record_object(tmax_adj, replace=True)
param_obj.add_record_object(tmin_adj, replace=True)
param_obj.add_record_object(jh_coef, replace=True)
param_obj.add_record(
"outlet_sta",
values=[outlet_sta[0] + 1,],
dimensions=[["one", 1]],
datatype=1
)
param_obj.add_record(
"id_obsrunoff",
values=[outlet_sta[0] + 1, ],
dimensions=[["one", 1]],
datatype=1
)
param_obj.add_record(
"tsta_elev",
values=[1932.4,],
dimensions=[["ntemp", 1]],
datatype=2
)
# build the prms data file
prmsdata = PrmsData(data_df=cdf)
control_obj = ControlFileBuilder().build("saghen_90m", param_obj, ml)
# build the PrmsModel
prms = PrmsModel(control_obj, parameters=param_obj, data=prmsdata)
gsf = GsflowModel(control=control_obj, prms=prms, mf=ml)
gsf.control.set_values("start_time", [1982, 10, 1, 0, 0, 0])
gsf.control.add_record("end_time", values=[1996, 9, 31, 0, 0, 0])
gsf.control.add_record("print_debug", values=[0, ])
gsf.control.add_record("modflow_time_zero", values=[1982, 10, 1, 0, 0, 0])
gsf.control.add_record("data_file", values=["sagehen_90m.data",])
gsf.control.add_record("srunoff_module", values=["srunoff_smidx"])
gsf.control.add_record("model_output_file", values=["gsflow_sagehen_90.out"])
gsf.control.set_values("model_mode", values=["GSFLOW5"])
gsf.control.set_values("subbasin_flag", values=[0,])
gsf.control.set_values("parameter_check_flag", values=[0, ])
gsf.control.add_record("statsON_OFF", values=[1])
gsf.control.add_record("nstatVars", values=[6])
gsf.control.add_record("statVar_element", values=["1", "1", "1", "1", "1", "1"])
gsf.control.add_record("statVar_names",
values=["runoff",
"basin_cfs",
"basin_ssflow_cfs",
"basin_gwflow_cfs",
"basin_sroff_cfs",
"basin_dunnian"])
gsf.control.add_record("stat_var_file", values=["statvar_uncal.dat"])
# Modify PRMS paramters for calibration
# temp dist
tmax_lapse = gsf.prms.parameters.get_values('tmax_lapse')
tmin_lapse = gsf.prms.parameters.get_values('tmin_lapse')
tmax_lapse = tmax_lapse + 1.2 #0.7
tmin_lapse = tmin_lapse + 1.2 #0.7
gsf.prms.parameters.set_values("tmax_lapse", values=tmax_lapse)
gsf.prms.parameters.set_values("tmin_lapse", values=tmin_lapse)
max_missing = gsf.prms.parameters.get_values('max_missing')
max_missing = max_missing * 2
gsf.prms.parameters.set_values("max_missing", values=max_missing)
# snow
tmax_allsnow = gsf.prms.parameters.get_values('tmax_allsnow')
tmax_allsnow[:] = 0.7
gsf.prms.parameters.set_values("tmax_allsnow", values=tmax_allsnow)
value = [2.1, 2.1, 2.1, 2.1, 2.1, 2.1, 2.1, 2.1, 2.1, 2.1, 2.1, 2.1]
gsf.prms.parameters.add_record("tmax_allrain_offset", values=value, dimensions=[('nmonths', 12)])
covden_win = gsf.prms.parameters.get_values('covden_win')
rad_trncf = gsf.prms.parameters.get_values('rad_trncf')
rad_trncf = 0.8 * covden_win # correlated to covden_win
gsf.prms.parameters.set_values("rad_trncf", values=rad_trncf)
# ET
soil_moist_max = gsf.prms.parameters.get_values('soil_moist_max')
soil_moist_max = soil_moist_max * 3.0
gsf.prms.parameters.set_values("soil_moist_max", values=soil_moist_max)
value = [0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03, 0.03]
gsf.prms.parameters.add_record("jh_coef", values=value, dimensions=[('nmonths', 12)])
    # adding a 2D parameter like rain_adj would use dimensions = [('nmonths', 12), ('nhru', nhru)]
# runoff
snowinfil_max = gsf.prms.parameters.get_values('snowinfil_max')
snowinfil_max = snowinfil_max * 15.0
gsf.prms.parameters.set_values("snowinfil_max", values=snowinfil_max)
smidx_coef = gsf.prms.parameters.get_values('smidx_coef')
smidx_coef = smidx_coef / 100.0
smidx_exp = gsf.prms.parameters.get_values('smidx_exp')
smidx_exp = smidx_exp / 100.0
carea_max = gsf.prms.parameters.get_values('carea_max')
carea_max = carea_max / 100.0
gsf.prms.parameters.set_values("smidx_coef", values=smidx_coef)
gsf.prms.parameters.set_values("smidx_exp", values=smidx_exp)
gsf.prms.parameters.set_values("carea_max", values=carea_max)
# interflow
slowcoef_sq = gsf.prms.parameters.get_values('slowcoef_sq')
slowcoef_sq = slowcoef_sq * 0.1
gsf.prms.parameters.set_values("slowcoef_sq", values=slowcoef_sq)
slowcoef_lin = gsf.prms.parameters.get_values('slowcoef_lin')
slowcoef_lin = slowcoef_lin * 3.0
gsf.prms.parameters.set_values("slowcoef_lin", values=slowcoef_lin)
# Recharge
ssr2gw_rate = gsf.prms.parameters.get_values('ssr2gw_rate')
ssr2gw_rate = ssr2gw_rate * 500.0
gsf.prms.parameters.set_values("ssr2gw_rate", values=ssr2gw_rate)
sat_threshold = gsf.prms.parameters.get_values('sat_threshold')
sat_threshold = sat_threshold / 3
gsf.prms.parameters.set_values("sat_threshold", values=sat_threshold)
# clean unused parameters
par_to_remove = ["gw_up_id", "gw_down_id", "gw_strmseg_down_id", "gw_pct_up"]
for par_ in par_to_remove:
gsf.prms.parameters.remove_record(par_)
gsf.write_input(basename="sagehen_90m", workspace=ows)
gsf.prms.parameters.remove_record("adjmix_rain")
exe_name = os.path.join("..", "..", "bin", "gsflow")
if platform.system().lower() == "windows":
exe_name += ".exe"
# reload the model to assure that it has valid formatting
gsf = GsflowModel.load_from_file(os.path.join(ows, "sagehen_90m_cont.control"))
gsf.run_model(gsflow_exe=exe_name)
# load PRMS output with simulated and measured streamflow and flow components
stats = gsf.prms.get_StatVar()
# get data from 10/1/1985 onward
stats = stats[1096:]
stats.reset_index(inplace=True, drop=True)
# Calculate N-S and Log(N-S)
nse_val = nash_sutcliffe_efficiency(stats.basin_cfs_1, stats.runoff_1, False)
nse_val_log = nash_sutcliffe_efficiency(stats.basin_cfs_1, stats.runoff_1, True)
nse_val_log_str = str(nse_val_log)
print(nse_val,nse_val_log)
gw_seepage = stats.basin_cfs_1.values.copy() - (
stats.basin_ssflow_cfs_1.values.copy() +
stats.basin_sroff_cfs_1.values.copy() +
stats.basin_dunnian_1.values.copy()
)
with styles.USGSMap():
fig, axis = plt.subplots(2,1,figsize=(10, 6))
plt.rcParams.update({'font.size': 100})
axis[0].plot(stats.Date, stats.basin_cfs_1, color='r', linewidth=2.2, label='simulated')
axis[0].plot(stats.Date, stats.runoff_1, '--', color='b', linewidth=1.5, label='measured')
handles, labels = axis[0].get_legend_handles_labels()
axis[0].legend(handles, labels, bbox_to_anchor=(0.25, 0.65))
axis[0].set_xlabel("Date")
axis[0].set_ylabel("Streamflow, in cfs")
axis[0].set_ylim(0, 400)
plt.xlabel("Date")
plt.ylabel("Streamflow, in cfs")
plt.ylim(0, 400)
with styles.USGSMap():
axis[1].set_xlabel("Date")
axis[1].set_ylabel("Flow Components, in cfs")
axis[1].set_yscale("log")
plt.xlabel("Date")
plt.ylabel("Flow Components, in cfs")
plt.yscale("log")
plt.ylim(1.0e-3, 1.0e4)
axis[1].plot(stats.Date, stats.basin_ssflow_cfs_1, color='r', linewidth=1.5, label='Interflow')
axis[1].plot(stats.Date, gw_seepage, color='purple', linewidth=1.5, label='Groundwater seepage')
axis[1].plot(stats.Date, stats.basin_sroff_cfs_1, color='y', linewidth=1.5, label='Hortonian runoff')
axis[1].plot(stats.Date, stats.basin_dunnian_1, color='b', linewidth=1.5, label='Dunnian runoff')
handles, labels = axis[1].get_legend_handles_labels()
axis[1].legend(handles, labels, bbox_to_anchor=(0.25, 0.65))
plt.tight_layout()
plt.show()
| 39.043411
| 110
| 0.622523
|
f2dedb2b3b457dbec64ad4b148b87b72ca25136f
| 1,861
|
gyp
|
Python
|
base/base.gyp
|
PeterDaveHello/incubator-pagespeed-mod
|
885f4653e204e1152cb3928f0755d93ec5fdceae
|
[
"Apache-2.0"
] | 2
|
2019-11-02T07:54:17.000Z
|
2020-04-16T09:26:51.000Z
|
base/base.gyp
|
PeterDaveHello/incubator-pagespeed-mod
|
885f4653e204e1152cb3928f0755d93ec5fdceae
|
[
"Apache-2.0"
] | 12
|
2017-03-14T18:26:11.000Z
|
2021-10-01T15:33:50.000Z
|
base/base.gyp
|
PeterDaveHello/incubator-pagespeed-mod
|
885f4653e204e1152cb3928f0755d93ec5fdceae
|
[
"Apache-2.0"
] | 1
|
2020-05-20T07:09:05.000Z
|
2020-05-20T07:09:05.000Z
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Base was branched from the chromium version to reduce the number of
# dependencies of this package. Specifically, we would like to avoid
# depending on the chrome directory, which contains the chrome version
# and branding information.
# TODO(morlovich): push this refactoring to chromium trunk.
{
'variables': {
'chromium_code': 1,
'chromium_root': '<(DEPTH)/third_party/chromium/src',
},
'includes': [
'base.gypi',
],
'targets': [
{
# This is the subset of files from base that should not be used with a
# dynamic library. Note that this library cannot depend on base because
# base depends on base_static.
'target_name': 'base_static',
'type': 'static_library',
'sources': [
'<(chromium_root)/base/base_switches.cc',
'<(chromium_root)/base/base_switches.h',
'<(chromium_root)/base/win/pe_image.cc',
'<(chromium_root)/base/win/pe_image.h',
],
'include_dirs': [
'<(chromium_root)',
'<(DEPTH)',
],
},
{
'target_name': 'base_unittests',
'type': 'executable',
'sources': [
'<(chromium_root)/base/string_piece_unittest.cc',
'<(chromium_root)/base/win/win_util_unittest.cc',
],
'dependencies': [
'base',
'base_static',
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/testing/gtest.gyp:gtest_main',
],
'include_dirs': [
'<(DEPTH)',
],
'conditions': [
['OS != "win"', {
'sources!': [
'<(chromium_root)/base/win_util_unittest.cc',
],
}],
],
},
],
}
| 29.078125
| 77
| 0.586781
|
d8fce5b0ee358018e5fb6e51aa343aed8828b827
| 1,928
|
py
|
Python
|
aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/DescribeVsDomainRecordDataRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/DescribeVsDomainRecordDataRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-vs/aliyunsdkvs/request/v20181212/DescribeVsDomainRecordDataRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkvs.endpoint import endpoint_data
class DescribeVsDomainRecordDataRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'vs', '2018-12-12', 'DescribeVsDomainRecordData')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_StartTime(self):
return self.get_query_params().get('StartTime')
def set_StartTime(self,StartTime):
self.add_query_param('StartTime',StartTime)
def get_DomainName(self):
return self.get_query_params().get('DomainName')
def set_DomainName(self,DomainName):
self.add_query_param('DomainName',DomainName)
def get_EndTime(self):
return self.get_query_params().get('EndTime')
def set_EndTime(self,EndTime):
self.add_query_param('EndTime',EndTime)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
| 34.428571
| 78
| 0.762967
|
e4e2cbbd63a010383ef5c34e579a345ee10605af
| 1,926
|
py
|
Python
|
13.py
|
jick155/Y.L_leetcode
|
a3bae5014bc194261b016d88682602d0fe714f48
|
[
"MIT"
] | null | null | null |
13.py
|
jick155/Y.L_leetcode
|
a3bae5014bc194261b016d88682602d0fe714f48
|
[
"MIT"
] | null | null | null |
13.py
|
jick155/Y.L_leetcode
|
a3bae5014bc194261b016d88682602d0fe714f48
|
[
"MIT"
] | null | null | null |
"""
Roman numerals are represented by seven different symbols: I, V, X, L, C, D and M.
Symbol Value
I 1
V 5
X 10
L 50
C 100
D 500
M 1000
For example, two is written as II in Roman numeral, just two one's added together. Twelve is written as, XII, which is simply X + II. The number twenty seven is written as XXVII, which is XX + V + II.
Roman numerals are usually written largest to smallest from left to right. However, the numeral for four is not IIII. Instead, the number four is written as IV. Because the one is before the five we subtract it making four. The same principle applies to the number nine, which is written as IX. There are six instances where subtraction is used:
I can be placed before V (5) and X (10) to make 4 and 9.
X can be placed before L (50) and C (100) to make 40 and 90.
C can be placed before D (500) and M (1000) to make 400 and 900.
Given a roman numeral, convert it to an integer. Input is guaranteed to be within the range from 1 to 3999.
Example 1:
Input: "III"
Output: 3
Example 2:
Input: "IV"
Output: 4
Example 3:
Input: "IX"
Output: 9
Example 4:
Input: "LVIII"
Output: 58
Explanation: L = 50, V= 5, III = 3.
Example 5:
Input: "MCMXCIV"
Output: 1994
Explanation: M = 1000, CM = 900, XC = 90 and IV = 4.
Problem (translated from Chinese): convert Roman numerals to an integer.
I, V, X, L, C, D and M each stand for a different integer value.
Scan the string from right to left; whenever the value already seen to the right
is larger than the current value, subtract the current value instead of adding it.
Example 2:
Input: "IV"
Output: 4 --> 5 - 1
Space: only two extra variables (res, prev) are kept --> O(1)
Time: a single for loop over the string --> O(n)
"""
class Solution:
def romanToInt(self, s: str) -> int:
        # res accumulates the running total; prev holds the value seen immediately
        # to the right (used to decide whether the current symbol is subtracted)
        res, prev = 0, 0
        values = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}
        for i in s[::-1]:  # scan from right to left
            if values[i] >= prev:
                res += values[i]
            else:
                res -= values[i]
            prev = values[i]
return res
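# Added illustrative sanity check (a sketch, not part of the original
# submission): replaying the examples from the problem statement above.
if __name__ == "__main__":
    solver = Solution()
    assert solver.romanToInt("III") == 3
    assert solver.romanToInt("IV") == 4
    assert solver.romanToInt("IX") == 9
    assert solver.romanToInt("LVIII") == 58
    assert solver.romanToInt("MCMXCIV") == 1994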
| 26.75
| 345
| 0.605919
|
a1968aba4cc4bcacd944cc1ea4e3cecfe8679083
| 1,959
|
py
|
Python
|
nicos_ess/utilities/csv_utils.py
|
ebadkamil/nicos
|
0355a970d627aae170c93292f08f95759c97f3b5
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 12
|
2019-11-06T15:40:36.000Z
|
2022-01-01T16:23:00.000Z
|
nicos_ess/utilities/csv_utils.py
|
ebadkamil/nicos
|
0355a970d627aae170c93292f08f95759c97f3b5
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 4
|
2019-11-08T10:18:16.000Z
|
2021-01-13T13:07:29.000Z
|
nicos_ess/utilities/csv_utils.py
|
ISISComputingGroup/nicos
|
94cb4d172815919481f8c6ee686f21ebb76f2068
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 6
|
2020-01-11T10:52:30.000Z
|
2022-02-25T12:35:23.000Z
|
# -*- coding: utf-8 -*-
# *****************************************************************************
# NICOS, the Networked Instrument Control System of the MLZ
# Copyright (c) 2009-2021 by the NICOS contributors (see AUTHORS)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
#
# Ebad Kamil <Ebad.Kamil@ess.eu>
# Matt Clarke <matt.clarke@ess.eu>
#
# *****************************************************************************
import csv
def export_table_to_csv(data, filename, headers=None):
"""Export 2D data list to a text file.
:param data: 2D data list
:param filename: file to save as
:param headers: List of column names.
"""
with open(filename, "w") as file:
writer = csv.writer(file)
if headers:
writer.writerow(headers)
writer.writerows(data)
def import_table_from_csv(filename):
"""Import tabular data from a csv file.
:param filename: path to csv file
:return: tuple of headers (empty if no headers) and rows
"""
with open(filename, "r") as file:
sniffer = csv.Sniffer()
has_header = sniffer.has_header(file.read(2048))
file.seek(0)
rows = list(csv.reader(file))
if has_header:
return rows[0], rows[1:]
return [], rows
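# Illustrative round-trip sketch (not part of the NICOS module); the file name
# below is an assumption chosen for the example.
if __name__ == "__main__":
    export_table_to_csv([[1, 2], [3, 4]], "table.csv", headers=["a", "b"])
    headers, rows = import_table_from_csv("table.csv")
    print(headers, rows)  # e.g. ['a', 'b'] [['1', '2'], ['3', '4']]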
| 34.368421
| 79
| 0.627361
|
b216450dbc15c08471b04fd78bb965443cb2e1fd
| 21,653
|
py
|
Python
|
tests/functional/test_download.py
|
DiegoCaraballo/pip
|
4b6bb5da37c182acd509c85f72840ca37108cf65
|
[
"MIT"
] | null | null | null |
tests/functional/test_download.py
|
DiegoCaraballo/pip
|
4b6bb5da37c182acd509c85f72840ca37108cf65
|
[
"MIT"
] | null | null | null |
tests/functional/test_download.py
|
DiegoCaraballo/pip
|
4b6bb5da37c182acd509c85f72840ca37108cf65
|
[
"MIT"
] | null | null | null |
import os.path
import textwrap
import pytest
from pip._internal.cli.status_codes import ERROR
from tests.lib.path import Path
def fake_wheel(data, wheel_path):
data.packages.join(
'simple.dist-0.1-py2.py3-none-any.whl'
).copy(data.packages.join(wheel_path))
@pytest.mark.network
def test_download_if_requested(script):
"""
It should download (in the scratch path) and not install if requested.
"""
result = script.pip(
'download', '-d', 'pip_downloads', 'INITools==0.1', expect_error=True
)
assert Path('scratch') / 'pip_downloads' / 'INITools-0.1.tar.gz' \
in result.files_created
assert script.site_packages / 'initools' not in result.files_created
@pytest.mark.network
def test_basic_download_setuptools(script):
"""
It should download (in the scratch path) and not install if requested.
"""
result = script.pip('download', 'setuptools')
setuptools_prefix = str(Path('scratch') / 'setuptools')
assert any(
path.startswith(setuptools_prefix) for path in result.files_created
)
def test_download_wheel(script, data):
"""
Test using "pip download" to download a *.whl archive.
"""
result = script.pip(
'download',
'--no-index',
'-f', data.packages,
'-d', '.', 'meta'
)
assert (
Path('scratch') / 'meta-1.0-py2.py3-none-any.whl'
in result.files_created
)
assert script.site_packages / 'piptestpackage' not in result.files_created
@pytest.mark.network
def test_single_download_from_requirements_file(script):
"""
It should support download (in the scratch path) from PyPI from a
requirements file
"""
script.scratch_path.join("test-req.txt").write(textwrap.dedent("""
INITools==0.1
"""))
result = script.pip(
'download', '-r', script.scratch_path / 'test-req.txt', '-d', '.',
expect_error=True,
)
assert Path('scratch') / 'INITools-0.1.tar.gz' in result.files_created
assert script.site_packages / 'initools' not in result.files_created
@pytest.mark.network
def test_basic_download_should_download_dependencies(script):
"""
It should download dependencies (in the scratch path)
"""
result = script.pip(
'download', 'Paste[openid]==1.7.5.1', '-d', '.', expect_error=True,
)
assert Path('scratch') / 'Paste-1.7.5.1.tar.gz' in result.files_created
openid_tarball_prefix = str(Path('scratch') / 'python-openid-')
assert any(
path.startswith(openid_tarball_prefix) for path in result.files_created
)
assert script.site_packages / 'openid' not in result.files_created
def test_download_wheel_archive(script, data):
"""
It should download a wheel archive path
"""
wheel_filename = 'colander-0.9.9-py2.py3-none-any.whl'
wheel_path = '/'.join((data.find_links, wheel_filename))
result = script.pip(
'download', wheel_path,
'-d', '.', '--no-deps'
)
assert Path('scratch') / wheel_filename in result.files_created
def test_download_should_download_wheel_deps(script, data):
"""
It should download dependencies for wheels(in the scratch path)
"""
wheel_filename = 'colander-0.9.9-py2.py3-none-any.whl'
dep_filename = 'translationstring-1.1.tar.gz'
wheel_path = '/'.join((data.find_links, wheel_filename))
result = script.pip(
'download', wheel_path,
'-d', '.', '--find-links', data.find_links, '--no-index'
)
assert Path('scratch') / wheel_filename in result.files_created
assert Path('scratch') / dep_filename in result.files_created
@pytest.mark.network
def test_download_should_skip_existing_files(script):
"""
It should not download files already existing in the scratch dir
"""
script.scratch_path.join("test-req.txt").write(textwrap.dedent("""
INITools==0.1
"""))
result = script.pip(
'download', '-r', script.scratch_path / 'test-req.txt', '-d', '.',
expect_error=True,
)
assert Path('scratch') / 'INITools-0.1.tar.gz' in result.files_created
assert script.site_packages / 'initools' not in result.files_created
# adding second package to test-req.txt
script.scratch_path.join("test-req.txt").write(textwrap.dedent("""
INITools==0.1
python-openid==2.2.5
"""))
# only the second package should be downloaded
result = script.pip(
'download', '-r', script.scratch_path / 'test-req.txt', '-d', '.',
expect_error=True,
)
openid_tarball_prefix = str(Path('scratch') / 'python-openid-')
assert any(
path.startswith(openid_tarball_prefix) for path in result.files_created
)
assert Path('scratch') / 'INITools-0.1.tar.gz' not in result.files_created
assert script.site_packages / 'initools' not in result.files_created
assert script.site_packages / 'openid' not in result.files_created
@pytest.mark.network
def test_download_vcs_link(script):
"""
It should allow -d flag for vcs links, regression test for issue #798.
"""
result = script.pip(
'download', '-d', '.', 'git+git://github.com/pypa/pip-test-package.git'
)
assert (
Path('scratch') / 'pip-test-package-0.1.1.zip'
in result.files_created
)
assert script.site_packages / 'piptestpackage' not in result.files_created
def test_only_binary_set_then_download_specific_platform(script, data):
"""
Confirm that specifying an interpreter/platform constraint
is allowed when ``--only-binary=:all:`` is set.
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'linux_x86_64',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-py2.py3-none-any.whl'
in result.files_created
)
def test_no_deps_set_then_download_specific_platform(script, data):
"""
Confirm that specifying an interpreter/platform constraint
is allowed when ``--no-deps`` is set.
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--no-deps',
'--dest', '.',
'--platform', 'linux_x86_64',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-py2.py3-none-any.whl'
in result.files_created
)
def test_download_specific_platform_fails(script, data):
"""
Confirm that specifying an interpreter/platform constraint
enforces that ``--no-deps`` or ``--only-binary=:all:`` is set.
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--dest', '.',
'--platform', 'linux_x86_64',
'fake',
expect_error=True,
)
assert '--only-binary=:all:' in result.stderr
def test_no_binary_set_then_download_specific_platform_fails(script, data):
"""
Confirm that specifying an interpreter/platform constraint
enforces that ``--only-binary=:all:`` is set without ``--no-binary``.
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--no-binary=fake',
'--dest', '.',
'--platform', 'linux_x86_64',
'fake',
expect_error=True,
)
assert '--only-binary=:all:' in result.stderr
def test_download_specify_platform(script, data):
"""
Test using "pip download --platform" to download a .whl archive
supported for a specific platform
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
# Confirm that universal wheels are returned even for specific
# platforms.
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'linux_x86_64',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-py2.py3-none-any.whl'
in result.files_created
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'macosx_10_9_x86_64',
'fake'
)
data.reset()
fake_wheel(data, 'fake-1.0-py2.py3-none-macosx_10_9_x86_64.whl')
fake_wheel(data, 'fake-2.0-py2.py3-none-linux_x86_64.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'macosx_10_10_x86_64',
'fake'
)
assert (
Path('scratch') /
'fake-1.0-py2.py3-none-macosx_10_9_x86_64.whl'
in result.files_created
)
# OSX platform wheels are not backward-compatible.
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'macosx_10_8_x86_64',
'fake',
expect_error=True,
)
# No linux wheel provided for this version.
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'linux_x86_64',
'fake==1',
expect_error=True,
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'linux_x86_64',
'fake==2'
)
assert (
Path('scratch') / 'fake-2.0-py2.py3-none-linux_x86_64.whl'
in result.files_created
)
class TestDownloadPlatformManylinuxes(object):
"""
"pip download --platform" downloads a .whl archive supported for
manylinux platforms.
"""
@pytest.mark.parametrize("platform", [
"linux_x86_64",
"manylinux1_x86_64",
"manylinux2010_x86_64",
])
def test_download_universal(self, platform, script, data):
"""
Universal wheels are returned even for specific platforms.
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', platform,
'fake',
)
assert (
Path('scratch') / 'fake-1.0-py2.py3-none-any.whl'
in result.files_created
)
@pytest.mark.parametrize("wheel_abi,platform", [
("manylinux1_x86_64", "manylinux1_x86_64"),
("manylinux1_x86_64", "manylinux2010_x86_64"),
("manylinux2010_x86_64", "manylinux2010_x86_64"),
])
def test_download_compatible_manylinuxes(
self, wheel_abi, platform, script, data,
):
"""
Earlier manylinuxes are compatible with later manylinuxes.
"""
wheel = 'fake-1.0-py2.py3-none-{}.whl'.format(wheel_abi)
fake_wheel(data, wheel)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', platform,
'fake',
)
assert Path('scratch') / wheel in result.files_created
def test_explicit_platform_only(self, data, script):
"""
When specifying the platform, manylinux1 needs to be the
explicit platform--it won't ever be added to the compatible
tags.
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-linux_x86_64.whl')
script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--platform', 'linux_x86_64',
'fake',
expect_error=True,
)
def test_download__python_version(script, data):
"""
Test using "pip download --python-version" to download a .whl archive
supported for a specific interpreter
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '2',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-py2.py3-none-any.whl'
in result.files_created
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '3',
'fake'
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '27',
'fake'
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '33',
'fake'
)
data.reset()
fake_wheel(data, 'fake-1.0-py2-none-any.whl')
fake_wheel(data, 'fake-2.0-py3-none-any.whl')
# No py3 provided for version 1.
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '3',
'fake==1.0',
expect_error=True,
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '2',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-py2-none-any.whl'
in result.files_created
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '26',
'fake'
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '3',
'fake'
)
assert (
Path('scratch') / 'fake-2.0-py3-none-any.whl'
in result.files_created
)
def make_wheel_with_python_requires(script, package_name, python_requires):
"""
Create a wheel using the given python_requires.
:return: the path to the wheel file.
"""
package_dir = script.scratch_path / package_name
package_dir.mkdir()
text = textwrap.dedent("""\
from setuptools import setup
setup(name='{}',
python_requires='{}',
version='1.0')
""").format(package_name, python_requires)
package_dir.join('setup.py').write(text)
script.run(
'python', 'setup.py', 'bdist_wheel', '--universal', cwd=package_dir,
)
file_name = '{}-1.0-py2.py3-none-any.whl'.format(package_name)
return package_dir / 'dist' / file_name
def test_download__python_version_used_for_python_requires(
script, data, with_wheel,
):
"""
Test that --python-version is used for the Requires-Python check.
"""
wheel_path = make_wheel_with_python_requires(
script, 'mypackage', python_requires='==3.2',
)
wheel_dir = os.path.dirname(wheel_path)
def make_args(python_version):
return [
'download', '--no-index', '--find-links', wheel_dir,
'--only-binary=:all:',
'--dest', '.',
'--python-version', python_version,
'mypackage==1.0',
]
args = make_args('33')
result = script.pip(*args, expect_error=True)
expected_err = (
"ERROR: Package 'mypackage' requires a different Python: "
"3.3.0 not in '==3.2'"
)
assert expected_err in result.stderr, 'stderr: {}'.format(result.stderr)
# Now try with a --python-version that satisfies the Requires-Python.
args = make_args('32')
script.pip(*args) # no exception
def test_download_specify_abi(script, data):
"""
Test using "pip download --abi" to download a .whl archive
supported for a specific abi
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--implementation', 'fk',
'--abi', 'fake_abi',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-py2.py3-none-any.whl'
in result.files_created
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--implementation', 'fk',
'--abi', 'none',
'fake'
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--abi', 'cp27m',
'fake',
expect_error=True,
)
data.reset()
fake_wheel(data, 'fake-1.0-fk2-fakeabi-fake_platform.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--python-version', '2',
'--implementation', 'fk',
'--platform', 'fake_platform',
'--abi', 'fakeabi',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-fk2-fakeabi-fake_platform.whl'
in result.files_created
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--implementation', 'fk',
'--platform', 'fake_platform',
'--abi', 'none',
'fake',
expect_error=True,
)
def test_download_specify_implementation(script, data):
"""
Test using "pip download --abi" to download a .whl archive
supported for a specific abi
"""
fake_wheel(data, 'fake-1.0-py2.py3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--implementation', 'fk',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-py2.py3-none-any.whl'
in result.files_created
)
data.reset()
fake_wheel(data, 'fake-1.0-fk2.fk3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--implementation', 'fk',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-fk2.fk3-none-any.whl'
in result.files_created
)
data.reset()
fake_wheel(data, 'fake-1.0-fk3-none-any.whl')
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--implementation', 'fk',
'--python-version', '3',
'fake'
)
assert (
Path('scratch') / 'fake-1.0-fk3-none-any.whl'
in result.files_created
)
result = script.pip(
'download', '--no-index', '--find-links', data.find_links,
'--only-binary=:all:',
'--dest', '.',
'--implementation', 'fk',
'--python-version', '2',
'fake',
expect_error=True,
)
def test_download_exit_status_code_when_no_requirements(script):
"""
Test download exit status code when no requirements specified
"""
result = script.pip('download', expect_error=True)
assert (
"You must give at least one requirement to download" in result.stderr
)
assert result.returncode == ERROR
def test_download_exit_status_code_when_blank_requirements_file(script):
"""
Test download exit status code when blank requirements file specified
"""
script.scratch_path.join("blank.txt").write("\n")
script.pip('download', '-r', 'blank.txt')
def test_download_prefer_binary_when_tarball_higher_than_wheel(script, data):
fake_wheel(data, 'source-0.8-py2.py3-none-any.whl')
result = script.pip(
'download',
'--prefer-binary',
'--no-index',
'-f', data.packages,
'-d', '.', 'source'
)
assert (
Path('scratch') / 'source-0.8-py2.py3-none-any.whl'
in result.files_created
)
assert (
Path('scratch') / 'source-1.0.tar.gz'
not in result.files_created
)
def test_download_prefer_binary_when_wheel_doesnt_satisfy_req(script, data):
fake_wheel(data, 'source-0.8-py2.py3-none-any.whl')
script.scratch_path.join("test-req.txt").write(textwrap.dedent("""
source>0.9
"""))
result = script.pip(
'download',
'--prefer-binary',
'--no-index',
'-f', data.packages,
'-d', '.',
'-r', script.scratch_path / 'test-req.txt'
)
assert (
Path('scratch') / 'source-1.0.tar.gz'
in result.files_created
)
assert (
Path('scratch') / 'source-0.8-py2.py3-none-any.whl'
not in result.files_created
)
def test_download_prefer_binary_when_only_tarball_exists(script, data):
result = script.pip(
'download',
'--prefer-binary',
'--no-index',
'-f', data.packages,
'-d', '.', 'source'
)
assert (
Path('scratch') / 'source-1.0.tar.gz'
in result.files_created
)
| 29.419837
| 79
| 0.579365
|
4ed88f1c52dad3e783c46d6e0c305dcd09a42f80
| 177
|
py
|
Python
|
abc/abc079/abc079b.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | 1
|
2019-08-21T00:49:34.000Z
|
2019-08-21T00:49:34.000Z
|
abc/abc079/abc079b.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
abc/abc079/abc079b.py
|
c-yan/atcoder
|
940e49d576e6a2d734288fadaf368e486480a948
|
[
"MIT"
] | null | null | null |
N = int(input())
if N == 1:
print(1)
else:
L = [0] * (N + 1)
L[0] = 2
L[1] = 1
for i in range(2, N + 1):
L[i] = L[i - 1] + L[i - 2]
print(L[N])
| 14.75
| 34
| 0.350282
|
3a6fc1ac5dc223caeed7dc40cc4df25ed9758b0f
| 18,179
|
py
|
Python
|
src/command_modules/azure-cli-rdbms/azure/cli/command_modules/rdbms/tests/test_rdbms_commands.py
|
v-Ajnava/azure-cli
|
febec631d79bfca151e84267b5b409594bad598e
|
[
"MIT"
] | null | null | null |
src/command_modules/azure-cli-rdbms/azure/cli/command_modules/rdbms/tests/test_rdbms_commands.py
|
v-Ajnava/azure-cli
|
febec631d79bfca151e84267b5b409594bad598e
|
[
"MIT"
] | 3
|
2021-03-26T00:48:20.000Z
|
2022-03-29T22:05:39.000Z
|
src/command_modules/azure-cli-rdbms/azure/cli/command_modules/rdbms/tests/test_rdbms_commands.py
|
v-Ajnava/azure-cli
|
febec631d79bfca151e84267b5b409594bad598e
|
[
"MIT"
] | 1
|
2017-12-28T04:51:44.000Z
|
2017-12-28T04:51:44.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from datetime import datetime
from dateutil.tz import tzutc
from azure.cli.testsdk.base import execute
from azure.cli.testsdk.exceptions import CliTestError
from azure.cli.testsdk import (
JMESPathCheck,
NoneCheck,
ResourceGroupPreparer,
ScenarioTest)
from azure.cli.testsdk.preparers import (
AbstractPreparer,
SingleValueReplacer)
# Constants
SERVER_NAME_PREFIX = 'azuredbclitest'
SERVER_NAME_MAX_LENGTH = 63
class ServerPreparer(AbstractPreparer, SingleValueReplacer):
# pylint: disable=too-many-instance-attributes
def __init__(self, engine_type='mysql', engine_parameter_name='database_engine',
name_prefix=SERVER_NAME_PREFIX, parameter_name='server', location='westeurope',
admin_user='cloudsa', admin_password='SecretPassword123',
resource_group_parameter_name='resource_group', skip_delete=True):
super(ServerPreparer, self).__init__(name_prefix, SERVER_NAME_MAX_LENGTH)
self.engine_type = engine_type
self.engine_parameter_name = engine_parameter_name
self.location = location
self.parameter_name = parameter_name
self.admin_user = admin_user
self.admin_password = admin_password
self.resource_group_parameter_name = resource_group_parameter_name
self.skip_delete = skip_delete
def create_resource(self, name, **kwargs):
group = self._get_resource_group(**kwargs)
template = 'az {} server create -l {} -g {} -n {} -u {} -p {}'
execute(template.format(self.engine_type,
self.location,
group, name,
self.admin_user,
self.admin_password))
return {self.parameter_name: name,
self.engine_parameter_name: self.engine_type}
def remove_resource(self, name, **kwargs):
if not self.skip_delete:
group = self._get_resource_group(**kwargs)
execute('az {} server delete -g {} -n {} --yes'.format(self.engine_type, group, name))
def _get_resource_group(self, **kwargs):
return kwargs.get(self.resource_group_parameter_name)
class ServerMgmtScenarioTest(ScenarioTest):
@ResourceGroupPreparer(parameter_name='resource_group_1')
@ResourceGroupPreparer(parameter_name='resource_group_2')
def test_mysql_server_mgmt(self, resource_group_1, resource_group_2):
self._test_server_mgmt('mysql', resource_group_1, resource_group_2)
@ResourceGroupPreparer(parameter_name='resource_group_1')
@ResourceGroupPreparer(parameter_name='resource_group_2')
def test_postgres_server_mgmt(self, resource_group_1, resource_group_2):
self._test_server_mgmt('postgres', resource_group_1, resource_group_2)
def _test_server_mgmt(self, database_engine, resource_group_1, resource_group_2):
servers = [self.create_random_name(SERVER_NAME_PREFIX, SERVER_NAME_MAX_LENGTH),
self.create_random_name('azuredbclirestore', SERVER_NAME_MAX_LENGTH)]
admin_login = 'cloudsa'
admin_passwords = ['SecretPassword123', 'SecretPassword456']
edition = 'Basic'
old_cu = 100
new_cu = 50
rg = resource_group_1
loc = 'westeurope'
# test create server
self.cmd('{} server create -g {} --name {} -l {} '
'--admin-user {} --admin-password {} '
'--performance-tier {} --compute-units {} --tags key=1'
.format(database_engine, rg, servers[0], loc,
admin_login, admin_passwords[0], edition, old_cu),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('administratorLogin', admin_login),
JMESPathCheck('sslEnforcement', 'Enabled'),
JMESPathCheck('tags.key', '1'),
JMESPathCheck('sku.capacity', old_cu),
JMESPathCheck('sku.tier', edition)])
# test show server
result = self.cmd('{} server show -g {} --name {}'
.format(database_engine, rg, servers[0]),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('administratorLogin', admin_login),
JMESPathCheck('sku.capacity', 100),
JMESPathCheck('resourceGroup', rg)]).get_output_in_json()
# test update server
self.cmd('{} server update -g {} --name {} --admin-password {} '
'--ssl-enforcement Disabled --tags key=2'
.format(database_engine, rg, servers[0], admin_passwords[1]),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('sslEnforcement', 'Disabled'),
JMESPathCheck('sku.tier', edition),
JMESPathCheck('tags.key', '2'),
JMESPathCheck('administratorLogin', admin_login)])
self.cmd('{} server update -g {} --name {} --compute-units {}'
.format(database_engine, rg, servers[0], new_cu),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('sku.tier', edition),
JMESPathCheck('sku.capacity', new_cu),
JMESPathCheck('administratorLogin', admin_login)])
# test show server
self.cmd('{} server show -g {} --name {}'
.format(database_engine, rg, servers[0]),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('sslEnforcement', 'Disabled'),
JMESPathCheck('sku.tier', edition),
JMESPathCheck('sku.capacity', new_cu),
JMESPathCheck('tags.key', '2'),
JMESPathCheck('administratorLogin', admin_login)])
# test update server per property
self.cmd('{} server update -g {} --name {} --compute-units {}'
.format(database_engine, rg, servers[0], old_cu),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('sku.tier', edition),
JMESPathCheck('sku.capacity', old_cu),
JMESPathCheck('administratorLogin', admin_login)])
self.cmd('{} server update -g {} --name {} --ssl-enforcement Enabled'
.format(database_engine, rg, servers[0]),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('sslEnforcement', 'Enabled'),
JMESPathCheck('sku.tier', edition),
JMESPathCheck('administratorLogin', admin_login)])
self.cmd('{} server update -g {} --name {} --tags key=3'
.format(database_engine, rg, servers[0]),
checks=[
JMESPathCheck('name', servers[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('sslEnforcement', 'Enabled'),
JMESPathCheck('tags.key', '3'),
JMESPathCheck('sku.tier', edition),
JMESPathCheck('administratorLogin', admin_login)])
# test restore to a new server, make sure wait at least 5 min after server created.
from time import sleep
sleep(300)
self.cmd('{} server restore -g {} --name {} '
'--source-server {} '
'--restore-point-in-time {}'
.format(database_engine, resource_group_2, servers[1], result['id'],
datetime.utcnow().replace(tzinfo=tzutc()).isoformat()),
checks=[
JMESPathCheck('name', servers[1]),
JMESPathCheck('resourceGroup', resource_group_2),
JMESPathCheck('sku.tier', edition),
JMESPathCheck('administratorLogin', admin_login)])
# test list servers
self.cmd('{} server list -g {}'.format(database_engine, resource_group_2),
checks=[JMESPathCheck('type(@)', 'array')])
# test list servers without resource group
self.cmd('{} server list'.format(database_engine),
checks=[JMESPathCheck('type(@)', 'array')])
# test delete server
self.cmd('{} server delete -g {} --name {} --yes'
.format(database_engine, rg, servers[0]), checks=NoneCheck())
self.cmd('{} server delete -g {} -n {} --yes'
.format(database_engine, resource_group_2, servers[1]), checks=NoneCheck())
# test list server should be 0
self.cmd('{} server list -g {}'.format(database_engine, rg), checks=[NoneCheck()])
class ProxyResourcesMgmtScenarioTest(ScenarioTest):
@ResourceGroupPreparer()
@ServerPreparer(engine_type='mysql')
def test_mysql_proxy_resources_mgmt(self, resource_group, server, database_engine):
self._test_firewall_mgmt(resource_group, server, database_engine)
self._test_db_mgmt(resource_group, server, database_engine)
self._test_configuration_mgmt(resource_group, server, database_engine)
self._test_log_file_mgmt(resource_group, server, database_engine)
@ResourceGroupPreparer()
@ServerPreparer(engine_type='postgres')
def test_postgres_proxy_resources_mgmt(self, resource_group, server, database_engine):
self._test_firewall_mgmt(resource_group, server, database_engine)
self._test_db_mgmt(resource_group, server, database_engine)
self._test_configuration_mgmt(resource_group, server, database_engine)
self._test_log_file_mgmt(resource_group, server, database_engine)
def _test_firewall_mgmt(self, resource_group, server, database_engine):
rg = resource_group
firewall_rule_1 = 'rule1'
start_ip_address_1 = '0.0.0.0'
end_ip_address_1 = '255.255.255.255'
firewall_rule_2 = 'rule2'
start_ip_address_2 = '123.123.123.123'
end_ip_address_2 = '123.123.123.124'
# test firewall-rule create
self.cmd('{} server firewall-rule create -n {} -g {} -s {} '
'--start-ip-address {} --end-ip-address {}'
.format(database_engine, firewall_rule_1, rg, server,
start_ip_address_1, end_ip_address_1),
checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_1),
JMESPathCheck('endIpAddress', end_ip_address_1)])
# test firewall-rule show
self.cmd('{} server firewall-rule show --name {} -g {} --server {}'
.format(database_engine, firewall_rule_1, rg, server),
checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_1),
JMESPathCheck('endIpAddress', end_ip_address_1)])
# test firewall-rule update
self.cmd('{} server firewall-rule update -n {} -g {} -s {} '
'--start-ip-address {} --end-ip-address {}'
.format(database_engine, firewall_rule_1, rg, server,
start_ip_address_2, end_ip_address_2),
checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_2),
JMESPathCheck('endIpAddress', end_ip_address_2)])
self.cmd('{} server firewall-rule update --name {} -g {} --server {} '
'--start-ip-address {}'
.format(database_engine, firewall_rule_1, rg, server,
start_ip_address_1),
checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_1),
JMESPathCheck('endIpAddress', end_ip_address_2)])
self.cmd('{} server firewall-rule update -n {} -g {} -s {} '
'--end-ip-address {}'
.format(database_engine, firewall_rule_1, rg, server,
end_ip_address_1),
checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_1),
JMESPathCheck('endIpAddress', end_ip_address_1)])
# test firewall-rule create another rule
self.cmd('{} server firewall-rule create --name {} -g {} --server {} '
'--start-ip-address {} --end-ip-address {}'
.format(database_engine, firewall_rule_2, rg, server,
start_ip_address_2, end_ip_address_2),
checks=[
JMESPathCheck('name', firewall_rule_2),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_2),
JMESPathCheck('endIpAddress', end_ip_address_2)])
# test firewall-rule list
self.cmd('{} server firewall-rule list -g {} -s {}'
.format(database_engine, rg, server), checks=[JMESPathCheck('length(@)', 2)])
self.cmd('{} server firewall-rule delete --name {} -g {} --server {} --yes'
.format(database_engine, firewall_rule_1, rg, server), checks=NoneCheck())
self.cmd('{} server firewall-rule list -g {} --server {}'
.format(database_engine, rg, server), checks=[JMESPathCheck('length(@)', 1)])
self.cmd('{} server firewall-rule delete -n {} -g {} -s {} --yes'
.format(database_engine, firewall_rule_2, rg, server), checks=NoneCheck())
self.cmd('{} server firewall-rule list -g {} --server {}'
.format(database_engine, rg, server), checks=[NoneCheck()])
def _test_db_mgmt(self, resource_group, server, database_engine):
rg = resource_group
check = NoneCheck() if database_engine == 'mysql' else JMESPathCheck('type(@)', 'array')
self.cmd('{} db list -g {} -s {}'
.format(database_engine, rg, server),
checks=[check])
def _test_configuration_mgmt(self, resource_group, server, database_engine):
rg = resource_group
if database_engine == 'mysql':
config_name = 'log_slow_admin_statements'
default_value = 'OFF'
new_value = 'ON'
else:
config_name = 'array_nulls'
default_value = 'on'
new_value = 'off'
# test show configuration
self.cmd('{} server configuration show --name {} -g {} --server {}'
.format(database_engine, config_name, rg, server),
checks=[
JMESPathCheck('name', config_name),
JMESPathCheck('value', default_value),
JMESPathCheck('source', 'system-default')])
# test update configuration
self.cmd('{} server configuration set -n {} -g {} -s {} --value {}'
.format(database_engine, config_name, rg, server, new_value),
checks=[
JMESPathCheck('name', config_name),
JMESPathCheck('value', new_value),
JMESPathCheck('source', 'user-override')])
self.cmd('{} server configuration set -n {} -g {} -s {}'
.format(database_engine, config_name, rg, server),
checks=[
JMESPathCheck('name', config_name),
JMESPathCheck('value', default_value)])
# test list configurations
self.cmd('{} server configuration list -g {} -s {}'
.format(database_engine, rg, server),
checks=[JMESPathCheck('type(@)', 'array')])
def _test_log_file_mgmt(self, resource_group, server, database_engine):
rg = resource_group
if database_engine == 'mysql':
config_name = 'slow_query_log'
new_value = 'ON'
# test update configuration
self.cmd('{} server configuration set -n {} -g {} -s {} --value {}'
.format(database_engine, config_name, rg, server, new_value),
checks=[
JMESPathCheck('name', config_name),
JMESPathCheck('value', new_value)])
# test list log files
result = self.cmd('{} server-logs list -g {} -s {} --file-last-written 43800' # ensure recording good for at least 5 years!
.format(database_engine, rg, server),
checks=[
JMESPathCheck('length(@)', 1),
JMESPathCheck('type(@)', 'array')]).get_output_in_json()
self.assertIsNotNone(result[0]['name'])
if __name__ == '__main__':
import unittest
unittest.main()
| 47.713911
| 132
| 0.564442
|
2610658266b130c1b5972a0cd36b262b99e1c37d
| 2,331
|
py
|
Python
|
connectome/models/evaluation.py
|
JanaGauss/Connectome
|
9b59aabfb4040201c72d7ff239b50bb47f092ad1
|
[
"MIT"
] | 1
|
2022-03-22T15:58:31.000Z
|
2022-03-22T15:58:31.000Z
|
connectome/models/evaluation.py
|
JanaGauss/Connectome
|
9b59aabfb4040201c72d7ff239b50bb47f092ad1
|
[
"MIT"
] | 6
|
2022-03-16T16:20:14.000Z
|
2022-03-17T10:54:13.000Z
|
connectome/models/evaluation.py
|
JanaGauss/Connectome
|
9b59aabfb4040201c72d7ff239b50bb47f092ad1
|
[
"MIT"
] | null | null | null |
"""
function to evaluate the fitted model on test data
"""
from sklearn.metrics import accuracy_score, roc_auc_score, precision_score, recall_score, f1_score, mean_squared_error, \
mean_absolute_error, r2_score
import numpy as np
import pandas as pd
def model_evaluation(model, X_test, y_test):
"""
Evaluates the model based on a set of metrics.
Classification: Accuracy, Precision, Recall, F1 and AUC.
Regression: MSE, MAE and R2.
    Check out https://scikit-learn.org/stable/modules/classes.html#module-sklearn.metrics for details.
For CNN make sure that the test_dataset was also transformed.
Examples:
>>> # Evaluate any model.
>>> model_evaluation(model, X_test, y_test)
Args:
model: A fitted ML model
X_test: The test dataset to be evaluated
y_test: The true labels
Returns:
Returns a dataframe containing model evaluations depending on prespecified metrics
"""
if type(X_test) == list:
assert X_test[0].shape[0] == len(y_test), 'X_test and y_test are not of equal length'
else:
assert len(X_test) == len(y_test), 'X_test and y_test are not of equal length'
if len(np.unique(y_test)) == 2: # classification setting
if model.__class__.__name__ in ['LogisticRegressionCV', 'RandomForestClassifier']:
predictions = model.predict(X_test) # class labels
score = model.predict_proba(X_test)[:, 1] # probabilities
else:
score = model.predict(X_test)
predictions = np.round(score)
accuracy = accuracy_score(y_test, predictions)
precision = precision_score(y_test, predictions)
recall = recall_score(y_test, predictions)
f1 = f1_score(y_test, predictions)
auc = roc_auc_score(y_test, score)
return pd.DataFrame(
{"Accuracy": [accuracy], "Precision": [precision], "Recall": [recall], "F1": [f1], "AUC": [auc]})
else:
predictions = model.predict(X_test)
# regression setting
mse = mean_squared_error(y_test, predictions)
mae = mean_absolute_error(y_test, predictions)
r2 = r2_score(y_test, predictions)
return pd.DataFrame({"Mean Squared Error": [mse], "Mean Absolute Error": [mae], "Recall": [r2]})
| 37.596774
| 121
| 0.670528
|
c052e64a4254b46fffdf2df8aee0571b90fba37a
| 293
|
py
|
Python
|
ccarl/plotting/utils.py
|
andrewguy/CCARL
|
0afda67bcc58be2b6b6bf426cccaab04453c0590
|
[
"MIT"
] | 2
|
2020-05-13T12:50:44.000Z
|
2020-07-27T08:32:42.000Z
|
ccarl/plotting/utils.py
|
andrewguy/CCARL
|
0afda67bcc58be2b6b6bf426cccaab04453c0590
|
[
"MIT"
] | 1
|
2020-04-30T15:33:45.000Z
|
2021-11-30T01:53:18.000Z
|
ccarl/plotting/utils.py
|
andrewguy/CCARL
|
0afda67bcc58be2b6b6bf426cccaab04453c0590
|
[
"MIT"
] | 2
|
2020-12-05T00:25:43.000Z
|
2022-02-10T13:58:35.000Z
|
'''Plotting utility functions'''
def remove_top_right_borders(ax):
'''Remove top and right borders from Matplotlib axis'''
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
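# Illustrative usage sketch (not part of the original module):
# import matplotlib.pyplot as plt
# fig, ax = plt.subplots()
# ax.plot([0, 1], [0, 1])
# remove_top_right_borders(ax)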
| 29.3
| 59
| 0.720137
|
aedaf67dbe5dccb9d33fc88a346a2868fe4999d7
| 19,149
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20200701/web_application_firewall_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20200701/web_application_firewall_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20200701/web_application_firewall_policy.py
|
polivbr/pulumi-azure-native
|
09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['WebApplicationFirewallPolicyArgs', 'WebApplicationFirewallPolicy']
@pulumi.input_type
class WebApplicationFirewallPolicyArgs:
def __init__(__self__, *,
managed_rules: pulumi.Input['ManagedRulesDefinitionArgs'],
resource_group_name: pulumi.Input[str],
custom_rules: Optional[pulumi.Input[Sequence[pulumi.Input['WebApplicationFirewallCustomRuleArgs']]]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
policy_name: Optional[pulumi.Input[str]] = None,
policy_settings: Optional[pulumi.Input['PolicySettingsArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a WebApplicationFirewallPolicy resource.
:param pulumi.Input['ManagedRulesDefinitionArgs'] managed_rules: Describes the managedRules structure.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[Sequence[pulumi.Input['WebApplicationFirewallCustomRuleArgs']]] custom_rules: The custom rules inside the policy.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] policy_name: The name of the policy.
:param pulumi.Input['PolicySettingsArgs'] policy_settings: The PolicySettings for policy.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "managed_rules", managed_rules)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if custom_rules is not None:
pulumi.set(__self__, "custom_rules", custom_rules)
if id is not None:
pulumi.set(__self__, "id", id)
if location is not None:
pulumi.set(__self__, "location", location)
if policy_name is not None:
pulumi.set(__self__, "policy_name", policy_name)
if policy_settings is not None:
pulumi.set(__self__, "policy_settings", policy_settings)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="managedRules")
def managed_rules(self) -> pulumi.Input['ManagedRulesDefinitionArgs']:
"""
Describes the managedRules structure.
"""
return pulumi.get(self, "managed_rules")
@managed_rules.setter
def managed_rules(self, value: pulumi.Input['ManagedRulesDefinitionArgs']):
pulumi.set(self, "managed_rules", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="customRules")
def custom_rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['WebApplicationFirewallCustomRuleArgs']]]]:
"""
The custom rules inside the policy.
"""
return pulumi.get(self, "custom_rules")
@custom_rules.setter
def custom_rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['WebApplicationFirewallCustomRuleArgs']]]]):
pulumi.set(self, "custom_rules", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="policyName")
def policy_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the policy.
"""
return pulumi.get(self, "policy_name")
@policy_name.setter
def policy_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "policy_name", value)
@property
@pulumi.getter(name="policySettings")
def policy_settings(self) -> Optional[pulumi.Input['PolicySettingsArgs']]:
"""
The PolicySettings for policy.
"""
return pulumi.get(self, "policy_settings")
@policy_settings.setter
def policy_settings(self, value: Optional[pulumi.Input['PolicySettingsArgs']]):
pulumi.set(self, "policy_settings", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class WebApplicationFirewallPolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
custom_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['WebApplicationFirewallCustomRuleArgs']]]]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
managed_rules: Optional[pulumi.Input[pulumi.InputType['ManagedRulesDefinitionArgs']]] = None,
policy_name: Optional[pulumi.Input[str]] = None,
policy_settings: Optional[pulumi.Input[pulumi.InputType['PolicySettingsArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Defines web application firewall policy.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['WebApplicationFirewallCustomRuleArgs']]]] custom_rules: The custom rules inside the policy.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[pulumi.InputType['ManagedRulesDefinitionArgs']] managed_rules: Describes the managedRules structure.
:param pulumi.Input[str] policy_name: The name of the policy.
:param pulumi.Input[pulumi.InputType['PolicySettingsArgs']] policy_settings: The PolicySettings for policy.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WebApplicationFirewallPolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Defines web application firewall policy.
:param str resource_name: The name of the resource.
:param WebApplicationFirewallPolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WebApplicationFirewallPolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
custom_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['WebApplicationFirewallCustomRuleArgs']]]]] = None,
id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
managed_rules: Optional[pulumi.Input[pulumi.InputType['ManagedRulesDefinitionArgs']]] = None,
policy_name: Optional[pulumi.Input[str]] = None,
policy_settings: Optional[pulumi.Input[pulumi.InputType['PolicySettingsArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = WebApplicationFirewallPolicyArgs.__new__(WebApplicationFirewallPolicyArgs)
__props__.__dict__["custom_rules"] = custom_rules
__props__.__dict__["id"] = id
__props__.__dict__["location"] = location
if managed_rules is None and not opts.urn:
raise TypeError("Missing required property 'managed_rules'")
__props__.__dict__["managed_rules"] = managed_rules
__props__.__dict__["policy_name"] = policy_name
__props__.__dict__["policy_settings"] = policy_settings
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
__props__.__dict__["application_gateways"] = None
__props__.__dict__["etag"] = None
__props__.__dict__["http_listeners"] = None
__props__.__dict__["name"] = None
__props__.__dict__["path_based_rules"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["resource_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20200701:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20181201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20181201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20190201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20190201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20190401:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20190401:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20190601:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20190601:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20190701:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20190701:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20190801:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20190801:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20190901:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20190901:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20191101:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20191101:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20191201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20191201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20200301:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20200301:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20200401:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20200401:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20200501:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20200501:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20200601:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20200601:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20200801:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20200801:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20201101:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20201101:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20210201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20210201:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-native:network/v20210301:WebApplicationFirewallPolicy"), pulumi.Alias(type_="azure-nextgen:network/v20210301:WebApplicationFirewallPolicy")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(WebApplicationFirewallPolicy, __self__).__init__(
'azure-native:network/v20200701:WebApplicationFirewallPolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'WebApplicationFirewallPolicy':
"""
Get an existing WebApplicationFirewallPolicy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = WebApplicationFirewallPolicyArgs.__new__(WebApplicationFirewallPolicyArgs)
__props__.__dict__["application_gateways"] = None
__props__.__dict__["custom_rules"] = None
__props__.__dict__["etag"] = None
__props__.__dict__["http_listeners"] = None
__props__.__dict__["location"] = None
__props__.__dict__["managed_rules"] = None
__props__.__dict__["name"] = None
__props__.__dict__["path_based_rules"] = None
__props__.__dict__["policy_settings"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["resource_state"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
return WebApplicationFirewallPolicy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="applicationGateways")
def application_gateways(self) -> pulumi.Output[Sequence['outputs.ApplicationGatewayResponse']]:
"""
A collection of references to application gateways.
"""
return pulumi.get(self, "application_gateways")
@property
@pulumi.getter(name="customRules")
def custom_rules(self) -> pulumi.Output[Optional[Sequence['outputs.WebApplicationFirewallCustomRuleResponse']]]:
"""
The custom rules inside the policy.
"""
return pulumi.get(self, "custom_rules")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="httpListeners")
def http_listeners(self) -> pulumi.Output[Sequence['outputs.SubResourceResponse']]:
"""
A collection of references to application gateway http listeners.
"""
return pulumi.get(self, "http_listeners")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="managedRules")
def managed_rules(self) -> pulumi.Output['outputs.ManagedRulesDefinitionResponse']:
"""
Describes the managedRules structure.
"""
return pulumi.get(self, "managed_rules")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pathBasedRules")
def path_based_rules(self) -> pulumi.Output[Sequence['outputs.SubResourceResponse']]:
"""
A collection of references to application gateway path rules.
"""
return pulumi.get(self, "path_based_rules")
@property
@pulumi.getter(name="policySettings")
def policy_settings(self) -> pulumi.Output[Optional['outputs.PolicySettingsResponse']]:
"""
The PolicySettings for policy.
"""
return pulumi.get(self, "policy_settings")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of the web application firewall policy resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="resourceState")
def resource_state(self) -> pulumi.Output[str]:
"""
Resource status of the policy.
"""
return pulumi.get(self, "resource_state")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
| 49.737662
| 3,123
| 0.682647
|
addfa86da927cbc17c6a251b534219db012efc6d
| 755
|
py
|
Python
|
profiles_rest/urls.py
|
Gustah-araujo/Django-pratice
|
e2ab7574838a5a3be2aecfee745d27bff7754e7f
|
[
"MIT"
] | null | null | null |
profiles_rest/urls.py
|
Gustah-araujo/Django-pratice
|
e2ab7574838a5a3be2aecfee745d27bff7754e7f
|
[
"MIT"
] | null | null | null |
profiles_rest/urls.py
|
Gustah-araujo/Django-pratice
|
e2ab7574838a5a3be2aecfee745d27bff7754e7f
|
[
"MIT"
] | null | null | null |
"""profiles_rest URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
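# Hypothetical example: to route an app's API endpoints one could import include from
# django.urls and append e.g. path('api/', include('profiles_api.urls')) to urlpatterns;
# the app name here is a placeholder, not part of this project.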
| 34.318182
| 77
| 0.711258
|
83cece6fc3f519be001b02975f0ee23d20b9b5eb
| 177
|
py
|
Python
|
check_nsqd.py
|
phobson/pynsqd
|
84def12e8bfaa44307f9e235470661452507d606
|
[
"MIT"
] | null | null | null |
check_nsqd.py
|
phobson/pynsqd
|
84def12e8bfaa44307f9e235470661452507d606
|
[
"MIT"
] | null | null | null |
check_nsqd.py
|
phobson/pynsqd
|
84def12e8bfaa44307f9e235470661452507d606
|
[
"MIT"
] | null | null | null |
import sys
import matplotlib
matplotlib.use('agg')
from matplotlib.pyplot import style
style.use('classic')
import pynsqd
status = pynsqd.test(*sys.argv[1:])
sys.exit(status)
| 16.090909
| 35
| 0.768362
|
d03414bbc116321facb0bc11b6026ce4bf67055f
| 424
|
py
|
Python
|
django_dumpslow_example/example/views.py
|
lamby/django-dumpslow
|
0c2c46d16510596ee830cc97cb69b53992ed3fe6
|
[
"BSD-2-Clause"
] | 11
|
2016-03-21T17:39:03.000Z
|
2020-10-13T18:10:46.000Z
|
django_dumpslow_example/example/views.py
|
lamby/django-dumpslow
|
0c2c46d16510596ee830cc97cb69b53992ed3fe6
|
[
"BSD-2-Clause"
] | 3
|
2019-04-15T07:40:40.000Z
|
2019-07-07T14:11:45.000Z
|
django_dumpslow_example/example/views.py
|
lamby/django-dumpslow
|
0c2c46d16510596ee830cc97cb69b53992ed3fe6
|
[
"BSD-2-Clause"
] | 3
|
2019-04-11T09:42:21.000Z
|
2019-07-07T07:02:47.000Z
|
import time
from django.http import HttpResponse
from django.shortcuts import render_to_response
def index(request):
return render_to_response('index.html')
def slow(request):
time.sleep(2)
return HttpResponse('This page should have taken >=2s to render.')
class slow2(object):
def __call__(self, request):
time.sleep(5)
return HttpResponse('This page should have taken >=5s to render.')
| 24.941176
| 74
| 0.724057
|
e3d52970973d6f813ecb990ed0db57fa44a6afb7
| 366
|
py
|
Python
|
benchmark/sapcloudsdkpipeline/protobuf/run.py
|
sourcemeta/json-size-benchmark
|
ec18002407fc97ea3181874b84c6342841cfb0a9
|
[
"Apache-2.0"
] | null | null | null |
benchmark/sapcloudsdkpipeline/protobuf/run.py
|
sourcemeta/json-size-benchmark
|
ec18002407fc97ea3181874b84c6342841cfb0a9
|
[
"Apache-2.0"
] | 4
|
2022-01-15T01:27:24.000Z
|
2022-01-19T00:47:20.000Z
|
benchmark/sapcloudsdkpipeline/protobuf/run.py
|
sourcemeta/json-size-benchmark
|
ec18002407fc97ea3181874b84c6342841cfb0a9
|
[
"Apache-2.0"
] | null | null | null |
def encode(json, schema):
payload = schema.Main()
payload.general = json['general'] or 0
payload.stages = json['stages'] or 0
payload.steps = json['steps'] or 0
return payload
def decode(payload):
return {
'general': payload.general or None,
        'stages': payload.stages or None,
        'steps': payload.steps or None
}
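# Hypothetical round trip, assuming `main_pb2` is the protobuf module generated for this schema:
#   payload = encode({'general': 1, 'stages': 2, 'steps': 3}, main_pb2)
#   doc = decode(payload)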
| 24.4
| 43
| 0.620219
|
56262aac36f1dcf6a2ef689677ad4a152d732480
| 425
|
py
|
Python
|
tests/readwrite/test_paula.py
|
arne-cl/discoursegraphs
|
4e14688e19c980ac9bbac75ff1bf5d751ef44ac3
|
[
"BSD-3-Clause"
] | 41
|
2015-02-20T00:35:39.000Z
|
2022-03-15T13:54:13.000Z
|
tests/readwrite/test_paula.py
|
arne-cl/discoursegraphs
|
4e14688e19c980ac9bbac75ff1bf5d751ef44ac3
|
[
"BSD-3-Clause"
] | 68
|
2015-01-09T18:07:38.000Z
|
2021-10-06T16:30:43.000Z
|
tests/readwrite/test_paula.py
|
arne-cl/discoursegraphs
|
4e14688e19c980ac9bbac75ff1bf5d751ef44ac3
|
[
"BSD-3-Clause"
] | 8
|
2015-02-20T00:35:48.000Z
|
2021-10-30T14:09:03.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Arne Neumann <discoursegraphs.programming@arne.cl>
from tempfile import NamedTemporaryFile, mkdtemp
from pytest import maz_1423 # global fixture
import discoursegraphs as dg
"""
Basic tests for the paula output format.
"""
def test_write_paula():
"""convert a PCC document into a paula file."""
temp_dir = mkdtemp()
dg.write_paula(maz_1423, temp_dir)
| 21.25
| 60
| 0.722353
|
7885701ebd5d168f645f4eaa276703d819f32436
| 18,367
|
py
|
Python
|
tools/project-creator/Python2.6.6/Lib/test/test_py3kwarn.py
|
gohopo/nineck.ca
|
9601f5ae4c20f8a3ea27b06551556fa5e1eecce3
|
[
"MIT"
] | 81
|
2017-03-13T08:24:01.000Z
|
2021-04-02T09:48:38.000Z
|
tools/project-creator/Python2.6.6/Lib/test/test_py3kwarn.py
|
gohopo/nineck.ca
|
9601f5ae4c20f8a3ea27b06551556fa5e1eecce3
|
[
"MIT"
] | 6
|
2017-04-30T08:36:55.000Z
|
2017-09-22T01:37:28.000Z
|
tools/project-creator/Python2.6.6/Lib/test/test_py3kwarn.py
|
gohopo/nineck.ca
|
9601f5ae4c20f8a3ea27b06551556fa5e1eecce3
|
[
"MIT"
] | 41
|
2017-03-18T14:11:58.000Z
|
2021-04-14T05:06:09.000Z
|
import unittest
import sys
from test.test_support import (check_warnings, CleanImport,
TestSkipped, run_unittest)
import warnings
from contextlib import nested
if not sys.py3kwarning:
raise TestSkipped('%s must be run with the -3 flag' % __name__)
try:
from test.test_support import __warningregistry__ as _registry
except ImportError:
def check_deprecated_module(module_name):
return False
else:
past_warnings = _registry.keys()
del _registry
def check_deprecated_module(module_name):
"""Lookup the past warnings for module already loaded using
test_support.import_module(..., deprecated=True)
"""
return any(module_name in msg and ' removed' in msg
and issubclass(cls, DeprecationWarning)
and (' module' in msg or ' package' in msg)
for (msg, cls, line) in past_warnings)
def reset_module_registry(module):
try:
registry = module.__warningregistry__
except AttributeError:
pass
else:
registry.clear()
class TestPy3KWarnings(unittest.TestCase):
def assertWarning(self, _, warning, expected_message):
self.assertEqual(str(warning.message), expected_message)
def assertNoWarning(self, _, recorder):
self.assertEqual(len(recorder.warnings), 0)
def test_backquote(self):
expected = 'backquote not supported in 3.x; use repr()'
with check_warnings() as w:
exec "`2`" in {}
self.assertWarning(None, w, expected)
def test_paren_arg_names(self):
expected = 'parenthesized argument names are invalid in 3.x'
def check(s):
exec s in {}
self.assertWarning(None, w, expected)
with check_warnings() as w:
check("def f((x)): pass")
check("def f((((x))), (y)): pass")
check("def f((x), (((y))), m=32): pass")
# Something like def f((a, (b))): pass will raise the tuple
# unpacking warning.
def test_bool_assign(self):
# So we don't screw up our globals
def safe_exec(expr):
def f(**kwargs): pass
exec expr in {'f' : f}
expected = "assignment to True or False is forbidden in 3.x"
with check_warnings() as w:
safe_exec("True = False")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("False = True")
self.assertWarning(None, w, expected)
w.reset()
try:
safe_exec("obj.False = True")
except NameError: pass
self.assertWarning(None, w, expected)
w.reset()
try:
safe_exec("obj.True = False")
except NameError: pass
self.assertWarning(None, w, expected)
w.reset()
safe_exec("def False(): pass")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("def True(): pass")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("class False: pass")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("class True: pass")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("def f(True=43): pass")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("def f(False=None): pass")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("f(False=True)")
self.assertWarning(None, w, expected)
w.reset()
safe_exec("f(True=1)")
self.assertWarning(None, w, expected)
def test_type_inequality_comparisons(self):
expected = 'type inequality comparisons not supported in 3.x'
with check_warnings() as w:
self.assertWarning(int < str, w, expected)
w.reset()
self.assertWarning(type < object, w, expected)
def test_object_inequality_comparisons(self):
expected = 'comparing unequal types not supported in 3.x'
with check_warnings() as w:
self.assertWarning(str < [], w, expected)
w.reset()
self.assertWarning(object() < (1, 2), w, expected)
def test_dict_inequality_comparisons(self):
expected = 'dict inequality comparisons not supported in 3.x'
with check_warnings() as w:
self.assertWarning({} < {2:3}, w, expected)
w.reset()
self.assertWarning({} <= {}, w, expected)
w.reset()
self.assertWarning({} > {2:3}, w, expected)
w.reset()
self.assertWarning({2:3} >= {}, w, expected)
def test_cell_inequality_comparisons(self):
expected = 'cell comparisons not supported in 3.x'
def f(x):
def g():
return x
return g
cell0, = f(0).func_closure
cell1, = f(1).func_closure
with check_warnings() as w:
self.assertWarning(cell0 == cell1, w, expected)
w.reset()
self.assertWarning(cell0 < cell1, w, expected)
def test_code_inequality_comparisons(self):
expected = 'code inequality comparisons not supported in 3.x'
def f(x):
pass
def g(x):
pass
with check_warnings() as w:
self.assertWarning(f.func_code < g.func_code, w, expected)
w.reset()
self.assertWarning(f.func_code <= g.func_code, w, expected)
w.reset()
self.assertWarning(f.func_code >= g.func_code, w, expected)
w.reset()
self.assertWarning(f.func_code > g.func_code, w, expected)
def test_builtin_function_or_method_comparisons(self):
expected = ('builtin_function_or_method '
'order comparisons not supported in 3.x')
func = eval
meth = {}.get
with check_warnings() as w:
self.assertWarning(func < meth, w, expected)
w.reset()
self.assertWarning(func > meth, w, expected)
w.reset()
self.assertWarning(meth <= func, w, expected)
w.reset()
self.assertWarning(meth >= func, w, expected)
w.reset()
self.assertNoWarning(meth == func, w)
self.assertNoWarning(meth != func, w)
lam = lambda x: x
self.assertNoWarning(lam == func, w)
self.assertNoWarning(lam != func, w)
def test_sort_cmp_arg(self):
expected = "the cmp argument is not supported in 3.x"
lst = range(5)
cmp = lambda x,y: -1
with check_warnings() as w:
self.assertWarning(lst.sort(cmp=cmp), w, expected)
w.reset()
self.assertWarning(sorted(lst, cmp=cmp), w, expected)
w.reset()
self.assertWarning(lst.sort(cmp), w, expected)
w.reset()
self.assertWarning(sorted(lst, cmp), w, expected)
def test_sys_exc_clear(self):
expected = 'sys.exc_clear() not supported in 3.x; use except clauses'
with check_warnings() as w:
self.assertWarning(sys.exc_clear(), w, expected)
def test_methods_members(self):
expected = '__members__ and __methods__ not supported in 3.x'
class C:
__methods__ = ['a']
__members__ = ['b']
c = C()
with check_warnings() as w:
self.assertWarning(dir(c), w, expected)
def test_softspace(self):
expected = 'file.softspace not supported in 3.x'
with file(__file__) as f:
with check_warnings() as w:
self.assertWarning(f.softspace, w, expected)
def set():
f.softspace = 0
with check_warnings() as w:
self.assertWarning(set(), w, expected)
def test_slice_methods(self):
class Spam(object):
def __getslice__(self, i, j): pass
def __setslice__(self, i, j, what): pass
def __delslice__(self, i, j): pass
class Egg:
def __getslice__(self, i, h): pass
def __setslice__(self, i, j, what): pass
def __delslice__(self, i, j): pass
expected = "in 3.x, __{0}slice__ has been removed; use __{0}item__"
for obj in (Spam(), Egg()):
with check_warnings() as w:
self.assertWarning(obj[1:2], w, expected.format('get'))
w.reset()
del obj[3:4]
self.assertWarning(None, w, expected.format('del'))
w.reset()
obj[4:5] = "eggs"
self.assertWarning(None, w, expected.format('set'))
def test_tuple_parameter_unpacking(self):
expected = "tuple parameter unpacking has been removed in 3.x"
with check_warnings() as w:
exec "def f((a, b)): pass"
self.assertWarning(None, w, expected)
def test_buffer(self):
expected = 'buffer() not supported in 3.x'
with check_warnings() as w:
self.assertWarning(buffer('a'), w, expected)
def test_file_xreadlines(self):
expected = ("f.xreadlines() not supported in 3.x, "
"try 'for line in f' instead")
with file(__file__) as f:
with check_warnings() as w:
self.assertWarning(f.xreadlines(), w, expected)
def test_hash_inheritance(self):
with check_warnings() as w:
# With object as the base class
class WarnOnlyCmp(object):
def __cmp__(self, other): pass
self.assertEqual(len(w.warnings), 0)
w.reset()
class WarnOnlyEq(object):
def __eq__(self, other): pass
self.assertEqual(len(w.warnings), 1)
self.assertWarning(None, w,
"Overriding __eq__ blocks inheritance of __hash__ in 3.x")
w.reset()
class WarnCmpAndEq(object):
def __cmp__(self, other): pass
def __eq__(self, other): pass
self.assertEqual(len(w.warnings), 1)
self.assertWarning(None, w,
"Overriding __eq__ blocks inheritance of __hash__ in 3.x")
w.reset()
class NoWarningOnlyHash(object):
def __hash__(self): pass
self.assertEqual(len(w.warnings), 0)
            # With an intermediate class in the hierarchy
class DefinesAllThree(object):
def __cmp__(self, other): pass
def __eq__(self, other): pass
def __hash__(self): pass
class WarnOnlyCmp(DefinesAllThree):
def __cmp__(self, other): pass
self.assertEqual(len(w.warnings), 0)
w.reset()
class WarnOnlyEq(DefinesAllThree):
def __eq__(self, other): pass
self.assertEqual(len(w.warnings), 1)
self.assertWarning(None, w,
"Overriding __eq__ blocks inheritance of __hash__ in 3.x")
w.reset()
class WarnCmpAndEq(DefinesAllThree):
def __cmp__(self, other): pass
def __eq__(self, other): pass
self.assertEqual(len(w.warnings), 1)
self.assertWarning(None, w,
"Overriding __eq__ blocks inheritance of __hash__ in 3.x")
w.reset()
class NoWarningOnlyHash(DefinesAllThree):
def __hash__(self): pass
self.assertEqual(len(w.warnings), 0)
class TestStdlibRemovals(unittest.TestCase):
# test.testall not tested as it executes all unit tests as an
# import side-effect.
all_platforms = ('audiodev', 'imputil', 'mutex', 'user', 'new', 'rexec',
'Bastion', 'compiler', 'dircache', 'mimetools',
'fpformat', 'ihooks', 'mhlib', 'statvfs', 'htmllib',
'sgmllib', 'rfc822', 'sunaudio')
inclusive_platforms = {'irix' : ('pure', 'AL', 'al', 'CD', 'cd', 'cddb',
'cdplayer', 'CL', 'cl', 'DEVICE', 'GL',
'gl', 'ERRNO', 'FILE', 'FL', 'flp', 'fl',
'fm', 'GET', 'GLWS', 'imgfile', 'IN',
'IOCTL', 'jpeg', 'panel', 'panelparser',
'readcd', 'SV', 'torgb', 'WAIT'),
'darwin' : ('autoGIL', 'Carbon', 'OSATerminology',
'icglue', 'Nav', 'MacOS', 'aepack',
'aetools', 'aetypes', 'applesingle',
'appletrawmain', 'appletrunner',
'argvemulator', 'bgenlocations',
'EasyDialogs', 'macerrors', 'macostools',
'findertools', 'FrameWork', 'ic',
'gensuitemodule', 'icopen', 'macresource',
'MiniAEFrame', 'pimp', 'PixMapWrapper',
'terminalcommand', 'videoreader',
'_builtinSuites', 'CodeWarrior',
'Explorer', 'Finder', 'Netscape',
'StdSuites', 'SystemEvents', 'Terminal',
'cfmfile', 'bundlebuilder', 'buildtools',
'ColorPicker', 'Audio_mac'),
'sunos5' : ('sunaudiodev', 'SUNAUDIODEV'),
}
optional_modules = ('bsddb185', 'Canvas', 'dl', 'linuxaudiodev', 'imageop',
'sv', 'bsddb', 'dbhash')
def check_removal(self, module_name, optional=False):
"""Make sure the specified module, when imported, raises a
DeprecationWarning and specifies itself in the message."""
with nested(CleanImport(module_name), warnings.catch_warnings()):
warnings.filterwarnings("error", ".+ (module|package) .+ removed",
DeprecationWarning, __name__)
warnings.filterwarnings("error", ".+ removed .+ (module|package)",
DeprecationWarning, __name__)
try:
__import__(module_name, level=0)
except DeprecationWarning as exc:
self.assert_(module_name in exc.args[0],
"%s warning didn't contain module name"
% module_name)
except ImportError:
if not optional:
self.fail("Non-optional module {0} raised an "
"ImportError.".format(module_name))
else:
# For extension modules, check the __warningregistry__.
# They won't rerun their init code even with CleanImport.
if not check_deprecated_module(module_name):
self.fail("DeprecationWarning not raised for {0}"
.format(module_name))
def test_platform_independent_removals(self):
# Make sure that the modules that are available on all platforms raise
# the proper DeprecationWarning.
for module_name in self.all_platforms:
self.check_removal(module_name)
def test_platform_specific_removals(self):
# Test the removal of platform-specific modules.
for module_name in self.inclusive_platforms.get(sys.platform, []):
self.check_removal(module_name, optional=True)
def test_optional_module_removals(self):
# Test the removal of modules that may or may not be built.
for module_name in self.optional_modules:
self.check_removal(module_name, optional=True)
def test_os_path_walk(self):
msg = "In 3.x, os.path.walk is removed in favor of os.walk."
def dumbo(where, names, args): pass
for path_mod in ("ntpath", "macpath", "os2emxpath", "posixpath"):
mod = __import__(path_mod)
reset_module_registry(mod)
with check_warnings() as w:
mod.walk("crashers", dumbo, None)
self.assertEquals(str(w.message), msg)
def test_commands_members(self):
import commands
# commands module tests may have already triggered this warning
reset_module_registry(commands)
members = {"mk2arg" : 2, "mkarg" : 1, "getstatus" : 1}
for name, arg_count in members.items():
with warnings.catch_warnings():
warnings.filterwarnings("error")
func = getattr(commands, name)
self.assertRaises(DeprecationWarning, func, *([None]*arg_count))
def test_reduce_move(self):
from operator import add
# reduce tests may have already triggered this warning
reset_module_registry(unittest)
with warnings.catch_warnings():
warnings.filterwarnings("error", "reduce")
self.assertRaises(DeprecationWarning, reduce, add, range(10))
def test_mutablestring_removal(self):
# UserString.MutableString has been removed in 3.0.
import UserString
# UserString tests may have already triggered this warning
reset_module_registry(UserString)
with warnings.catch_warnings():
warnings.filterwarnings("error", ".*MutableString",
DeprecationWarning)
self.assertRaises(DeprecationWarning, UserString.MutableString)
def test_main():
run_unittest(TestPy3KWarnings,
TestStdlibRemovals)
if __name__ == '__main__':
test_main()
| 42.320276
| 81
| 0.537268
|
1978ff784ec584176661aca5e1682a8bbe322702
| 1,056
|
py
|
Python
|
source/dataset/ClueWordsDataset.py
|
celsofranssa/CluWords
|
2c08932eeec8cc6a3a6d97782577df8768c21765
|
[
"MIT"
] | 1
|
2021-08-18T18:15:11.000Z
|
2021-08-18T18:15:11.000Z
|
source/dataset/ClueWordsDataset.py
|
celsofranssa/ClueWords
|
2c08932eeec8cc6a3a6d97782577df8768c21765
|
[
"MIT"
] | null | null | null |
source/dataset/ClueWordsDataset.py
|
celsofranssa/ClueWords
|
2c08932eeec8cc6a3a6d97782577df8768c21765
|
[
"MIT"
] | null | null | null |
import json
import pickle
import torch
from torch.utils.data import Dataset
class ClueWordsDataset(Dataset):
"""MNIST Dataset.
"""
def __init__(self, samples, ids_path, tokenizer, max_length):
super(ClueWordsDataset, self).__init__()
self.samples = samples
self.tokenizer = tokenizer
self.max_length = max_length
self._load_ids(ids_path)
def _load_ids(self, ids_path):
with open(ids_path, "rb") as ids_file:
self.ids = pickle.load(ids_file)
def _encode(self, sample):
return {
"idx": sample["idx"],
"text": torch.tensor(
self.tokenizer.encode(text=sample["text"], max_length=self.max_length, padding="max_length",
truncation=True)
),
"cls": sample["cls"]
}
def __len__(self):
return len(self.ids)
def __getitem__(self, idx):
sample_id = self.ids[idx]
return self._encode(
self.samples[sample_id]
)
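# Hypothetical usage sketch (the samples dict, ids pickle and tokenizer are assumptions, not defined here):
# dataset = ClueWordsDataset(samples, "ids.pkl", tokenizer, max_length=128)
# loader = torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)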
| 25.756098
| 108
| 0.575758
|
9bb34fc506fd2d5d23c3c4198cd1c968ab01f4b2
| 35
|
py
|
Python
|
data.py
|
Tchepga/bidding
|
ea99b791629033402df01e20dcbf75ab11471491
|
[
"MIT"
] | null | null | null |
data.py
|
Tchepga/bidding
|
ea99b791629033402df01e20dcbf75ab11471491
|
[
"MIT"
] | null | null | null |
data.py
|
Tchepga/bidding
|
ea99b791629033402df01e20dcbf75ab11471491
|
[
"MIT"
] | null | null | null |
global encheres
encheres = dict()
| 8.75
| 17
| 0.742857
|
d6fd82548282fe93d7e73f4e9bc70c04ed427bef
| 16,809
|
py
|
Python
|
cinder/api/contrib/volume_actions.py
|
mail2nsrajesh/cinder
|
a688b872bec6d1abd4dcd852bdb8e8a921369d2e
|
[
"Apache-2.0"
] | null | null | null |
cinder/api/contrib/volume_actions.py
|
mail2nsrajesh/cinder
|
a688b872bec6d1abd4dcd852bdb8e8a921369d2e
|
[
"Apache-2.0"
] | null | null | null |
cinder/api/contrib/volume_actions.py
|
mail2nsrajesh/cinder
|
a688b872bec6d1abd4dcd852bdb8e8a921369d2e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_utils import encodeutils
from oslo_utils import strutils
import six
from six.moves import http_client
import webob
from cinder.api import extensions
from cinder.api.openstack import api_version_request
from cinder.api.openstack import wsgi
from cinder import exception
from cinder.i18n import _
from cinder.image import image_utils
from cinder import keymgr
from cinder import utils
from cinder import volume
CONF = cfg.CONF
def authorize(context, action_name):
action = 'volume_actions:%s' % action_name
extensions.extension_authorizer('volume', action)(context)
class VolumeActionsController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(VolumeActionsController, self).__init__(*args, **kwargs)
self._key_mgr = None
self.volume_api = volume.API()
@property
def _key_manager(self):
# Allows for lazy initialization of the key manager
if self._key_mgr is None:
self._key_mgr = keymgr.API(CONF)
return self._key_mgr
@wsgi.action('os-attach')
def _attach(self, req, id, body):
"""Add attachment metadata."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
# instance uuid is an option now
instance_uuid = None
if 'instance_uuid' in body['os-attach']:
instance_uuid = body['os-attach']['instance_uuid']
host_name = None
# Keep API backward compatibility
if 'host_name' in body['os-attach']:
host_name = body['os-attach']['host_name']
if 'mountpoint' not in body['os-attach']:
msg = _("Must specify 'mountpoint'")
raise webob.exc.HTTPBadRequest(explanation=msg)
mountpoint = body['os-attach']['mountpoint']
if 'mode' in body['os-attach']:
mode = body['os-attach']['mode']
else:
mode = 'rw'
if instance_uuid is None and host_name is None:
msg = _("Invalid request to attach volume to an invalid target")
raise webob.exc.HTTPBadRequest(explanation=msg)
if mode not in ('rw', 'ro'):
msg = _("Invalid request to attach volume with an invalid mode. "
"Attaching mode should be 'rw' or 'ro'")
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
self.volume_api.attach(context, volume,
instance_uuid, host_name, mountpoint, mode)
except messaging.RemoteError as error:
if error.exc_type in ['InvalidVolume', 'InvalidUUID',
'InvalidVolumeAttachMode']:
msg = _("Error attaching volume - %(err_type)s: "
"%(err_msg)s") % {
'err_type': error.exc_type, 'err_msg': error.value}
raise webob.exc.HTTPBadRequest(explanation=msg)
else:
# There are also few cases where attach call could fail due to
# db or volume driver errors. These errors shouldn't be exposed
# to the user and in such cases it should raise 500 error.
raise
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-detach')
def _detach(self, req, id, body):
"""Clear attachment metadata."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
attachment_id = None
if body['os-detach']:
attachment_id = body['os-detach'].get('attachment_id', None)
try:
self.volume_api.detach(context, volume, attachment_id)
except messaging.RemoteError as error:
if error.exc_type in ['VolumeAttachmentNotFound', 'InvalidVolume']:
msg = _("Error detaching volume - %(err_type)s: "
"%(err_msg)s") % {
'err_type': error.exc_type, 'err_msg': error.value}
raise webob.exc.HTTPBadRequest(explanation=msg)
else:
# There are also few cases where detach call could fail due to
# db or volume driver errors. These errors shouldn't be exposed
# to the user and in such cases it should raise 500 error.
raise
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-reserve')
def _reserve(self, req, id, body):
"""Mark volume as reserved."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
self.volume_api.reserve_volume(context, volume)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-unreserve')
def _unreserve(self, req, id, body):
"""Unmark volume as reserved."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
self.volume_api.unreserve_volume(context, volume)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-begin_detaching')
def _begin_detaching(self, req, id, body):
"""Update volume status to 'detaching'."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
self.volume_api.begin_detaching(context, volume)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-roll_detaching')
def _roll_detaching(self, req, id, body):
"""Roll back volume status to 'in-use'."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
self.volume_api.roll_detaching(context, volume)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-initialize_connection')
def _initialize_connection(self, req, id, body):
"""Initialize volume attachment."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
try:
connector = body['os-initialize_connection']['connector']
except KeyError:
raise webob.exc.HTTPBadRequest(
explanation=_("Must specify 'connector'"))
try:
info = self.volume_api.initialize_connection(context,
volume,
connector)
except exception.InvalidInput as err:
raise webob.exc.HTTPBadRequest(
explanation=err.msg)
except exception.VolumeBackendAPIException:
msg = _("Unable to fetch connection information from backend.")
raise webob.exc.HTTPInternalServerError(explanation=msg)
except messaging.RemoteError as error:
if error.exc_type == 'InvalidInput':
raise exception.InvalidInput(reason=error.value)
raise
return {'connection_info': info}
@wsgi.action('os-terminate_connection')
def _terminate_connection(self, req, id, body):
"""Terminate volume attachment."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
try:
connector = body['os-terminate_connection']['connector']
except KeyError:
raise webob.exc.HTTPBadRequest(
explanation=_("Must specify 'connector'"))
try:
self.volume_api.terminate_connection(context, volume, connector)
except exception.VolumeBackendAPIException:
msg = _("Unable to terminate volume connection from backend.")
raise webob.exc.HTTPInternalServerError(explanation=msg)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.response(http_client.ACCEPTED)
@wsgi.action('os-volume_upload_image')
def _volume_upload_image(self, req, id, body):
"""Uploads the specified volume to image service."""
context = req.environ['cinder.context']
params = body['os-volume_upload_image']
req_version = req.api_version_request
if not params.get("image_name"):
msg = _("No image_name was specified in request.")
raise webob.exc.HTTPBadRequest(explanation=msg)
force = params.get('force', 'False')
try:
force = strutils.bool_from_string(force, strict=True)
except ValueError as error:
err_msg = encodeutils.exception_to_unicode(error)
msg = _("Invalid value for 'force': '%s'") % err_msg
raise webob.exc.HTTPBadRequest(explanation=msg)
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
authorize(context, "upload_image")
# check for valid disk-format
disk_format = params.get("disk_format", "raw")
if not image_utils.validate_disk_format(disk_format):
msg = _("Invalid disk-format '%(disk_format)s' is specified. "
"Allowed disk-formats are %(allowed_disk_formats)s.") % {
"disk_format": disk_format,
"allowed_disk_formats": ", ".join(
image_utils.VALID_DISK_FORMATS)
}
raise webob.exc.HTTPBadRequest(explanation=msg)
image_metadata = {"container_format": params.get(
"container_format", "bare"),
"disk_format": disk_format,
"name": params["image_name"]}
if volume.encryption_key_id:
# Clone volume encryption key: the current key cannot
# be reused because it will be deleted when the volume is
# deleted.
# TODO(eharney): Currently, there is no mechanism to remove
# these keys, because Glance will not delete the key from
# Barbican when the image is deleted.
encryption_key_id = self._key_manager.store(
context,
self._key_manager.get(context, volume.encryption_key_id))
image_metadata['cinder_encryption_key_id'] = encryption_key_id
if req_version >= api_version_request.APIVersionRequest('3.1'):
image_metadata['visibility'] = params.get('visibility', 'private')
image_metadata['protected'] = params.get('protected', 'False')
if image_metadata['visibility'] == 'public':
authorize(context, 'upload_public')
if CONF.glance_api_version != 2:
# Replace visibility with is_public for Glance V1
image_metadata['is_public'] = (
image_metadata['visibility'] == 'public')
image_metadata.pop('visibility', None)
image_metadata['protected'] = (
utils.get_bool_param('protected', image_metadata))
try:
response = self.volume_api.copy_volume_to_image(context,
volume,
image_metadata,
force)
except exception.InvalidVolume as error:
raise webob.exc.HTTPBadRequest(explanation=error.msg)
except ValueError as error:
raise webob.exc.HTTPBadRequest(explanation=six.text_type(error))
except messaging.RemoteError as error:
msg = "%(err_type)s: %(err_msg)s" % {'err_type': error.exc_type,
'err_msg': error.value}
raise webob.exc.HTTPBadRequest(explanation=msg)
except Exception as error:
raise webob.exc.HTTPBadRequest(explanation=six.text_type(error))
return {'os-volume_upload_image': response}
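        # Hypothetical request body handled by the upload-image action above (values are placeholders):
        # {"os-volume_upload_image": {"image_name": "my-image", "disk_format": "raw",
        #                             "container_format": "bare", "force": "false"}}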
@wsgi.action('os-extend')
def _extend(self, req, id, body):
"""Extend size of volume."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
try:
size = int(body['os-extend']['new_size'])
except (KeyError, ValueError, TypeError):
msg = _("New volume size must be specified as an integer.")
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
self.volume_api.extend(context, volume, size)
except exception.InvalidVolume as error:
raise webob.exc.HTTPBadRequest(explanation=error.msg)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-update_readonly_flag')
def _volume_readonly_update(self, req, id, body):
"""Update volume readonly flag."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
try:
readonly_flag = body['os-update_readonly_flag']['readonly']
except KeyError:
msg = _("Must specify readonly in request.")
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
readonly_flag = strutils.bool_from_string(readonly_flag,
strict=True)
except ValueError as error:
err_msg = encodeutils.exception_to_unicode(error)
msg = _("Invalid value for 'readonly': '%s'") % err_msg
raise webob.exc.HTTPBadRequest(explanation=msg)
self.volume_api.update_readonly_flag(context, volume, readonly_flag)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-retype')
def _retype(self, req, id, body):
"""Change type of existing volume."""
context = req.environ['cinder.context']
volume = self.volume_api.get(context, id)
try:
new_type = body['os-retype']['new_type']
except KeyError:
msg = _("New volume type must be specified.")
raise webob.exc.HTTPBadRequest(explanation=msg)
policy = body['os-retype'].get('migration_policy')
self.volume_api.retype(context, volume, new_type, policy)
return webob.Response(status_int=http_client.ACCEPTED)
@wsgi.action('os-set_bootable')
def _set_bootable(self, req, id, body):
"""Update bootable status of a volume."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self.volume_api.get(context, id)
try:
bootable = body['os-set_bootable']['bootable']
except KeyError:
msg = _("Must specify bootable in request.")
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
bootable = strutils.bool_from_string(bootable,
strict=True)
except ValueError as error:
err_msg = encodeutils.exception_to_unicode(error)
msg = _("Invalid value for 'bootable': '%s'") % err_msg
raise webob.exc.HTTPBadRequest(explanation=msg)
update_dict = {'bootable': bootable}
self.volume_api.update(context, volume, update_dict)
return webob.Response(status_int=http_client.OK)
class Volume_actions(extensions.ExtensionDescriptor):
"""Enable volume actions."""
name = "VolumeActions"
alias = "os-volume-actions"
updated = "2012-05-31T00:00:00+00:00"
def get_controller_extensions(self):
controller = VolumeActionsController()
extension = extensions.ControllerExtension(self, 'volumes', controller)
return [extension]
| 41.709677
| 79
| 0.619133
|
175780f9c6671ba001414f995dc256d5e8bd2e41
| 3,257
|
py
|
Python
|
nova/api/openstack/compute/plugins/v3/certificates.py
|
bopopescu/OpenStack-DNRM-Nova
|
7354f378398850113ac93b511547ed05218dc770
|
[
"Apache-2.0"
] | null | null | null |
nova/api/openstack/compute/plugins/v3/certificates.py
|
bopopescu/OpenStack-DNRM-Nova
|
7354f378398850113ac93b511547ed05218dc770
|
[
"Apache-2.0"
] | 1
|
2018-01-19T07:50:49.000Z
|
2018-01-19T07:50:49.000Z
|
nova/api/openstack/compute/plugins/v3/certificates.py
|
bopopescu/OpenStack-DNRM-Nova
|
7354f378398850113ac93b511547ed05218dc770
|
[
"Apache-2.0"
] | 1
|
2020-07-24T08:49:47.000Z
|
2020-07-24T08:49:47.000Z
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
import nova.cert.rpcapi
from nova import network
ALIAS = "os-certificates"
authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS)
def make_certificate(elem):
elem.set('data')
elem.set('private_key')
class CertificateTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('certificate',
selector='certificate')
make_certificate(root)
return xmlutil.MasterTemplate(root, 1)
def _translate_certificate_view(certificate, private_key=None):
return {
'data': certificate,
'private_key': private_key,
}
class CertificatesController(object):
"""The x509 Certificates API controller for the OpenStack API."""
def __init__(self):
self.network_api = network.API()
self.cert_rpcapi = nova.cert.rpcapi.CertAPI()
super(CertificatesController, self).__init__()
@wsgi.serializers(xml=CertificateTemplate)
def show(self, req, id):
"""Return certificate information."""
context = req.environ['nova.context']
authorize(context)
if id != 'root':
msg = _("Only root certificate can be retrieved.")
raise webob.exc.HTTPNotImplemented(explanation=msg)
cert = self.cert_rpcapi.fetch_ca(context,
project_id=context.project_id)
return {'certificate': _translate_certificate_view(cert)}
@wsgi.serializers(xml=CertificateTemplate)
def create(self, req, body=None):
"""Create a certificate."""
context = req.environ['nova.context']
authorize(context)
pk, cert = self.cert_rpcapi.generate_x509_cert(context,
user_id=context.user_id, project_id=context.project_id)
context = req.environ['nova.context']
return {'certificate': _translate_certificate_view(cert, pk)}
class Certificates(extensions.V3APIExtensionBase):
"""Certificates support."""
name = "Certificates"
alias = ALIAS
namespace = ("http://docs.openstack.org/compute/ext/"
"certificates/api/v3")
version = 1
def get_resources(self):
resources = [
extensions.ResourceExtension('os-certificates',
CertificatesController(),
member_actions={})]
return resources
def get_controller_extensions(self):
return []
| 33.234694
| 78
| 0.663187
|
b7021ce5b0a0f8e859cb233be295bbf48237ca48
| 1,825
|
py
|
Python
|
initrd/usr/lib64/python2.6/site-packages/rpm/__init__.py
|
OpenCloudOS/OpenCloudOS-tools
|
06b12aab3182f4207d78a5d8733be03f0d7b69a4
|
[
"MulanPSL-1.0"
] | 8
|
2021-11-26T06:19:06.000Z
|
2022-01-11T01:30:11.000Z
|
initrd/usr/lib64/python2.6/site-packages/rpm/__init__.py
|
OpenCloudOS/OpenCloudOS-tools
|
06b12aab3182f4207d78a5d8733be03f0d7b69a4
|
[
"MulanPSL-1.0"
] | null | null | null |
initrd/usr/lib64/python2.6/site-packages/rpm/__init__.py
|
OpenCloudOS/OpenCloudOS-tools
|
06b12aab3182f4207d78a5d8733be03f0d7b69a4
|
[
"MulanPSL-1.0"
] | 2
|
2021-12-21T08:36:02.000Z
|
2021-12-21T08:55:38.000Z
|
r"""RPM Module
This module enables you to manipulate rpms and the rpm database.
"""
import warnings
import os
from rpm._rpm import *
from rpm.transaction import *
import rpm._rpm as _rpm
_RPMVSF_NODIGESTS = _rpm._RPMVSF_NODIGESTS
_RPMVSF_NOHEADER = _rpm._RPMVSF_NOHEADER
_RPMVSF_NOPAYLOAD = _rpm._RPMVSF_NOPAYLOAD
_RPMVSF_NOSIGNATURES = _rpm._RPMVSF_NOSIGNATURES
# try to import build bits but don't require it
try:
from rpm._rpmb import *
except ImportError:
pass
# backwards compatibility + give the same class both ways
ts = TransactionSet
def headerLoad(*args, **kwds):
warnings.warn("Use rpm.hdr() instead.", DeprecationWarning, stacklevel=2)
return hdr(*args, **kwds)
def _doHeaderListFromFD(rpm_fd, retrofit):
hlist = []
while 1:
try:
h = hdr(rpm_fd)
if retrofit:
h.convert(HEADERCONV_RETROFIT_V3)
hlist.append(h)
except _rpm.error:
break
return hlist
def readHeaderListFromFD(file_desc, retrofit = True):
if not isinstance(file_desc, fd):
file_desc = fd(file_desc)
return _doHeaderListFromFD(file_desc, retrofit)
def readHeaderListFromFile(path, retrofit = True):
f = fd(path)
hlist = _doHeaderListFromFD(f, retrofit)
f.close()
return hlist
def readHeaderFromFD(file_desc):
if not isinstance(file_desc, fd):
file_desc = fd(file_desc)
try:
offset = file_desc.tell()
h = hdr(file_desc)
except (_rpm.error, IOError):
offset = None
h = None
return (h, offset)
def signalsCaught(siglist):
caught = []
for sig in siglist:
if signalCaught(sig):
caught.append(sig)
return caught
def dsSingle(TagN, N, EVR = "", Flags = RPMSENSE_ANY):
return ds((N, EVR, Flags), TagN)
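# Hypothetical usage sketch (the .rpm path is a placeholder):
# for h in readHeaderListFromFile('/tmp/example.rpm'):
#     print(h['name'])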
| 24.013158
| 77
| 0.664658
|
3ece285a1714e3bf7eae2a8672cf7ad713689b52
| 5,000
|
py
|
Python
|
gmprocess/subcommands/generate_report.py
|
meramossepu1/groundmotion-processing
|
5cc19023b94e5b5b718590ce8cd05a22a4088a67
|
[
"Unlicense"
] | 54
|
2019-01-12T02:05:38.000Z
|
2022-03-29T19:43:56.000Z
|
gmprocess/subcommands/generate_report.py
|
meramossepu1/groundmotion-processing
|
5cc19023b94e5b5b718590ce8cd05a22a4088a67
|
[
"Unlicense"
] | 700
|
2018-12-18T19:44:31.000Z
|
2022-03-30T20:54:28.000Z
|
gmprocess/subcommands/generate_report.py
|
meramossepu1/groundmotion-processing
|
5cc19023b94e5b5b718590ce8cd05a22a4088a67
|
[
"Unlicense"
] | 41
|
2018-11-29T23:17:56.000Z
|
2022-03-31T04:04:23.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import logging
from gmprocess.subcommands.lazy_loader import LazyLoader
distributed = LazyLoader("distributed", globals(), "dask.distributed")
base = LazyLoader("base", globals(), "gmprocess.subcommands.base")
arg_dicts = LazyLoader("arg_dicts", globals(), "gmprocess.subcommands.arg_dicts")
ws = LazyLoader("ws", globals(), "gmprocess.io.asdf.stream_workspace")
report = LazyLoader("report", globals(), "gmprocess.io.report")
plot = LazyLoader("plot", globals(), "gmprocess.utils.plot")
const = LazyLoader("const", globals(), "gmprocess.utils.constants")
class GenerateReportModule(base.SubcommandModule):
"""Generate summary report (latex required)."""
command_name = "generate_report"
aliases = ("report",)
arguments = [
arg_dicts.ARG_DICTS["eventid"],
arg_dicts.ARG_DICTS["textfile"],
arg_dicts.ARG_DICTS["label"],
arg_dicts.ARG_DICTS["num_processes"],
]
def main(self, gmrecords):
"""Generate summary report.
This function generates summary plots and then combines them into a
report with latex. If latex (specifically `pdflatex`) is not found on
the system then the PDF report will not be generated but the
constituent plots will be available.
Args:
gmrecords:
GMrecordsApp instance.
"""
logging.info("Running subcommand '%s'" % self.command_name)
self.gmrecords = gmrecords
self._check_arguments()
self._get_events()
for event in self.events:
event_dir = os.path.join(self.gmrecords.data_path, event.id)
pstreams = self.generate_diagnostic_plots(event)
logging.info("Generating summary report for event %s..." % event.id)
build_conf = gmrecords.conf["build_report"]
report_format = build_conf["format"]
if report_format == "latex":
report_file, success = report.build_report_latex(
pstreams,
event_dir,
event,
prefix="%s_%s" % (gmrecords.project, gmrecords.args.label),
config=gmrecords.conf,
gmprocess_version=gmrecords.gmprocess_version,
)
else:
report_file = ""
success = False
if os.path.isfile(report_file) and success:
self.append_file("Summary report", report_file)
self._summarize_files_created()
def generate_diagnostic_plots(self, event):
event_dir = os.path.join(self.gmrecords.data_path, event.id)
workname = os.path.join(event_dir, const.WORKSPACE_NAME)
if not os.path.isfile(workname):
logging.info(
"No workspace file found for event %s. Please run "
"subcommand 'assemble' to generate workspace file." % event.id
)
logging.info("Continuing to next event.")
return False
self.workspace = ws.StreamWorkspace.open(workname)
ds = self.workspace.dataset
station_list = ds.waveforms.list()
self._get_labels()
if len(station_list) == 0:
logging.info("No processed waveforms available. No report generated.")
return False
if self.gmrecords.args.num_processes > 0:
futures = []
client = distributed.Client(
threads_per_worker=1, n_workers=self.gmrecords.args.num_processes
)
logging.info("Creating diagnostic plots for event %s..." % event.id)
plot_dir = os.path.join(event_dir, "plots")
if not os.path.isdir(plot_dir):
os.makedirs(plot_dir)
results = []
pstreams = []
for station_id in station_list:
streams = self.workspace.getStreams(
event.id,
stations=[station_id],
labels=[self.gmrecords.args.label],
config=self.gmrecords.conf,
)
if not len(streams):
raise ValueError("No matching streams found.")
for stream in streams:
pstreams.append(stream)
if self.gmrecords.args.num_processes > 0:
future = client.submit(plot.summary_plots, stream, plot_dir, event)
futures.append(future)
else:
results.append(plot.summary_plots(stream, plot_dir, event))
if self.gmrecords.args.num_processes > 0:
# Collect the results??
results = [future.result() for future in futures]
client.shutdown()
moveoutfile = os.path.join(event_dir, "moveout_plot.png")
plot.plot_moveout(pstreams, event.latitude, event.longitude, file=moveoutfile)
self.append_file("Moveout plot", moveoutfile)
self.workspace.close()
return pstreams
| 36.231884
| 87
| 0.6016
|
5c6372f7468b197754b5d863124b14294a859075
| 1,153
|
py
|
Python
|
src/nodemgr/common/utils.py
|
atsgen/tf-controller
|
9321889cdd3d7108980cc88937b2e82956502cc5
|
[
"Apache-2.0"
] | 37
|
2020-09-21T10:42:26.000Z
|
2022-01-09T10:16:40.000Z
|
src/nodemgr/common/utils.py
|
atsgen/tf-controller
|
9321889cdd3d7108980cc88937b2e82956502cc5
|
[
"Apache-2.0"
] | null | null | null |
src/nodemgr/common/utils.py
|
atsgen/tf-controller
|
9321889cdd3d7108980cc88937b2e82956502cc5
|
[
"Apache-2.0"
] | 21
|
2020-08-25T12:48:42.000Z
|
2022-03-22T04:32:18.000Z
|
#
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
import os
def get_package_version(pkg):
# retrieve current installed version of pkg
try:
cmd = 'rpm -q --qf "%%{VERSION}-%%{RELEASE}" %s' % pkg
return os.popen(cmd).read()
except Exception:
return None
def is_running_in_docker():
return os.path.exists('/.dockerenv')
def is_running_in_podman():
pid = os.getpid()
with open('/proc/{}/cgroup'.format(pid), 'rt') as ifh:
return 'libpod' in ifh.read()
def is_running_in_kubepod():
pid = os.getpid()
with open('/proc/{}/cgroup'.format(pid), 'rt') as ifh:
return 'kubepods' in ifh.read()
def is_running_in_containerd():
pid = os.getpid()
with open('/proc/{}/cgroup'.format(pid), 'rt') as ifh:
return 'containerd' in ifh.read()
def get_memory_cgroup(pid_):
with open('/proc/{}/cgroup'.format(pid_), 'r') as f:
while True:
line = f.readline()
if not line:
break
value = line.partition(':memory:')[2]
if value:
return value.strip()
return None
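# Hypothetical usage sketch:
# if is_running_in_docker():
#     mem_cgroup = get_memory_cgroup(os.getpid())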
| 22.607843
| 64
| 0.58196
|
7703cb3eb79c5a97d7d9f1d8efe4697ad347012b
| 1,609
|
py
|
Python
|
milieu/data/go.py
|
seyuboglu/milieu
|
dfcd23ecb4449e6fa5debf8ed371fbc5d5c22e8e
|
[
"Apache-2.0"
] | 1
|
2020-07-15T18:52:29.000Z
|
2020-07-15T18:52:29.000Z
|
milieu/data/go.py
|
seyuboglu/milieu
|
dfcd23ecb4449e6fa5debf8ed371fbc5d5c22e8e
|
[
"Apache-2.0"
] | null | null | null |
milieu/data/go.py
|
seyuboglu/milieu
|
dfcd23ecb4449e6fa5debf8ed371fbc5d5c22e8e
|
[
"Apache-2.0"
] | null | null | null |
"""GO Ontology data methods."""
import logging
import os
import json
import datetime
import time
import pickle
from collections import defaultdict
from multiprocessing import Pool
import numpy as np
from scipy.stats import spearmanr
import pandas as pd
from tqdm import tqdm
from goatools.obo_parser import GODag
from goatools.associations import read_ncbi_gene2go
from goatools.go_enrichment import GOEnrichmentStudy
def load_go_annotations(proteins,
levels=None,
obodag_path="data/go/go-basic.obo",
entrez_to_go_path="data/go/gene2go.txt"):
"""
args:
@proteins (iterable) proteins to get annotations for
@levels (list(int)) the levels of the ontology
        @obodag_path (str) path to the .obo ontology file
@entrez_to_go_path (str) path to mapping from entrez ids to go doids
return:
@term_to_proteins (dict) map from term
"""
obodag = GODag(obodag_path)
entrez_to_go = read_ncbi_gene2go(entrez_to_go_path, taxids=[9606])
def get_annotations(protein, levels):
"""
"""
terms = set()
doids = entrez_to_go[protein]
for doid in doids:
for parent in obodag[doid].get_all_parents():
if levels is None or obodag[parent].level in levels:
terms.add(obodag[parent].name)
return terms
term_to_proteins = defaultdict(set)
for protein in proteins:
terms = get_annotations(protein, levels)
for term in terms:
term_to_proteins[term].add(protein)
return term_to_proteins
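# Hypothetical usage sketch (the entrez ids are placeholders; paths fall back to the defaults above):
# term_to_proteins = load_go_annotations([7157, 1956], levels=[1, 2])
# for term, prots in term_to_proteins.items():
#     print(term, len(prots))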
| 29.254545
| 72
| 0.661902
|
f16f00ff850669ce5c1caf9b939bf055749da7c3
| 1,111
|
py
|
Python
|
office365/sharepoint/webs/remote_web.py
|
andreas-j-hauser/Office365-REST-Python-Client
|
4bf8ee0b65985980b50fc3b74b32fd2db34561ba
|
[
"MIT"
] | null | null | null |
office365/sharepoint/webs/remote_web.py
|
andreas-j-hauser/Office365-REST-Python-Client
|
4bf8ee0b65985980b50fc3b74b32fd2db34561ba
|
[
"MIT"
] | null | null | null |
office365/sharepoint/webs/remote_web.py
|
andreas-j-hauser/Office365-REST-Python-Client
|
4bf8ee0b65985980b50fc3b74b32fd2db34561ba
|
[
"MIT"
] | null | null | null |
from office365.onedrive.list import List
from office365.runtime.client_object import ClientObject
from office365.runtime.queries.service_operation_query import ServiceOperationQuery
from office365.runtime.resource_path import ResourcePath
from office365.sharepoint.webs.web import Web
class RemoteWeb(ClientObject):
"""Specifies a remote web that might be on a different domain."""
def get_list_by_server_relative_url(self, serverRelativeUrl):
target_list = List(self.context)
qry = ServiceOperationQuery(self, "GetListByServerRelativeUrl", [serverRelativeUrl], None, None, target_list)
self.context.add_query(qry)
return target_list
@staticmethod
def create(context, requestUrl):
remote_web = RemoteWeb(context)
qry = ServiceOperationQuery(context, None, [requestUrl], None, None, remote_web)
qry.static = True
context.add_query(qry)
return remote_web
@property
def web(self):
"""Gets the SPWeb."""
return self.properties.get('Web', Web(self.context, ResourcePath("Web", self.resource_path)))
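# Hypothetical usage sketch (`ctx` is an assumed authenticated ClientContext; the site URL is a placeholder):
# remote = RemoteWeb.create(ctx, "https://contoso.sharepoint.com/sites/team")
# ctx.execute_query()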
| 38.310345
| 117
| 0.733573
|
dd5e32493c35b1effdde08f940e8c95ffb06a901
| 10,152
|
py
|
Python
|
model.py
|
yashbonde/NIPS-Generator
|
270ad015b087f91ebffc340ffaf818108943991d
|
[
"MIT"
] | 2
|
2019-08-16T13:28:17.000Z
|
2019-09-10T06:40:12.000Z
|
model.py
|
yashbonde/NIPS-Generator
|
270ad015b087f91ebffc340ffaf818108943991d
|
[
"MIT"
] | null | null | null |
model.py
|
yashbonde/NIPS-Generator
|
270ad015b087f91ebffc340ffaf818108943991d
|
[
"MIT"
] | null | null | null |
"""
tf_layers.py
Layer functions for the GPT-2 based model
27.09.2019 - @yashbonde
"""
import numpy as np
import tensorflow as tf
def gelu_activation(inp):
"""
    Gaussian Error Linear Unit (GELU) is an activation function that can approximate
    the behaviour of existing activations such as sigmoid, ReLU, ELU and tanh, and
    often gives better learning in practice.
See this [paper](https://arxiv.org/pdf/1606.08415.pdf)
:param inp: input tensor
:return:
"""
out = 1 + tf.tanh(np.sqrt(np.pi) * (inp + 0.044715 * tf.pow(inp, 3)))
out *= 0.5 * inp
return out
def shapes_list(inp):
"""
cleaner handling of tensorflow shapes
:param inp: input tensor
:return: list of shapes combining dynamic and static shapes
"""
shapes_static = inp.get_shape().as_list()
shapes_dynamic = tf.shape(inp)
cleaned_shape = [shapes_dynamic[i] if s is None else s for i, s in enumerate(shapes_static)]
return cleaned_shape
def softmax_with_reduce_max(inp, axis=-1):
"""
perform softmax, this is slightly different to the default softmax in tensorflow
:param inp:
:param axis:
:return:
"""
out = inp - tf.reduce_max(inp, axis=axis, keepdims=True)
ex = tf.exp(out)
sm = ex / tf.reduce_sum(ex, axis=axis, keepdims=True)
return sm
def normalise_tensor(inp, scope, *, axis=-1, epsilon=1e-5):
"""
Normalize the input values between 0 and 1, then do diagonal affine transform
:param inp: input tensor
:param scope: tf variable scope
:param axis: axis to perform ops on
:param epsilon: base minimum value
:return: normalised tensor
"""
with tf.variable_scope(scope):
e_dim = inp.get_shape().as_list()[-1]
g = tf.get_variable('g', [e_dim], initializer=tf.constant_initializer(1))
b = tf.get_variable('b', [e_dim], initializer=tf.constant_initializer(0))
u = tf.reduce_mean(inp, axis=axis, keepdims=True)
s = tf.reduce_mean(tf.square(inp - u), axis=axis, keepdims=True)
out = (inp - u) * tf.rsqrt(s + epsilon)
out = out * g + b
return out
def split_into_n_states(inp, n):
"""2
reshape last dimension of input tensor from n --> [n, inp.shape[-1]/n]
:param inp: input tensor
:param n: number of splits
:return: reshaped tensor
"""
*start, m = shapes_list(inp)
out = tf.reshape(inp, start + [n, m // n])
return out
def merge_n_states(inp):
"""
merge the last two dimensions
:param inp: input tensor
:return: reshaped tensor
"""
*start, m, n = shapes_list(inp)
out = tf.reshape(inp, start + [m * n])
return out
def conv1d(inp, scope, num_features, weights_init_stddev=0.2):
"""
1D convolutional block, first reshape input then matmul weights and then reshape
:param inp: input tensor
:param scope: tf variable scope
:param num_features: number of output features
:param weights_init_stddev: standard deviation value
:return: processed output
"""
with tf.variable_scope(scope):
*start, nx = shapes_list(inp)
weights = tf.get_variable('w', [1, nx, num_features],
initializer=tf.random_normal_initializer(stddev=weights_init_stddev))
bias = tf.get_variable('b', [num_features],
initializer=tf.constant_initializer(0))
# reshape input and weights and perform matmul and add bias
inp_reshaped = tf.reshape(inp, [-1, nx])
w_reshaped = tf.reshape(weights, [-1, num_features])
out = tf.matmul(inp_reshaped, w_reshaped) + bias
out = tf.reshape(out, start + [num_features])
return out
def attention_mask(nd, ns, dtype=tf.float32):
"""
    1's in the lower triangle, counting from the lower right corner.
    This is the same as using tf.matrix_band_part(), but it doesn't produce garbage on TPUs.
    :param nd: number of destination (query) positions
    :param ns: number of source (key) positions
    :param dtype: dtype of the returned mask
    :return: [nd, ns] mask tensor
"""
i = tf.range(nd)[:, None]
j = tf.range(ns)
m = i >= j - ns + nd
out = tf.cast(m, dtype)
return out
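# For reference, attention_mask(2, 3) above evaluates to
# [[1., 1., 0.],
#  [1., 1., 1.]]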
def attention(inp, scope, e_dim, past, config):
"""
complete attention model in a single function
:param inp: input tensor
:param scope: tf variable scope
:param e_dim: embedding dimension value
    :param past: cached key/value tensors from previous attention steps, or None
:param config: config file
:return: attention value and present value
"""
assert inp.shape.ndims == 3 # input should be of shape [batch, seqlen, embeddings] # [batch, sequence, features]
assert e_dim % config.num_heads == 0 # embedding can be split in heads
if past is not None:
        assert past.shape.ndims == 5  # [batch, 2, heads, seqlen, embeddings]
def split_heads(x):
out = split_into_n_states(x, config.num_heads)
out = tf.transpose(out, [0, 2, 1, 3])
return out
def merge_heads(x):
out = merge_n_states(tf.transpose(x, [0, 2, 1, 3]))
return out
def mask_attention_weights(w):
# w should have shape [batches, heads, dst_seq, src_seq], where information flows from scr to dst
_, _, nd, ns = shapes_list(w)
b = attention_mask(nd, ns, w.dtype)
b = tf.reshape(b, [1, 1, nd, ns])
w = w * b - tf.cast(1e10, w.dtype) * (1 - b)
return w
def multihead_attention(q, k, v):
w = tf.matmul(q, k, transpose_b=True)
w *= tf.rsqrt(tf.cast(v.shape[-1].value, w.dtype))
# mask attention weights
w = mask_attention_weights(w)
w = softmax_with_reduce_max(w)
out = tf.matmul(w, v)
return out
with tf.variable_scope(scope):
c = conv1d(inp, 'convolutional_attention', e_dim * 3)
q, k, v = map(split_heads, tf.split(c, 3, axis=2))
present = tf.stack([k, v], axis=1)
if past is not None:
# there is a stack below it
pk, pv = tf.unstack(past, axis=1)
k = tf.concat([pk, k], axis=2)
v = tf.concat([pv, v], axis=2)
attn = multihead_attention(q, k, v)
attn = merge_heads(attn)
out = conv1d(attn, 'convolutional_projection', e_dim)
return out, present
def multilayer_perceptron(inp, scope, hidden_dim):
"""
MLP
:param inp: input tensor
:param scope: tf variable scope
:param hidden_dim: hidden dimension
:return: output processed tensor
"""
with tf.variable_scope(scope):
nx = inp.shape[-1].value
out = conv1d(inp, 'convolutional_ff', hidden_dim)
out = gelu_activation(out)
out = conv1d(out, 'convolutional_projection', nx)
return out
def block(inp, scope, past, config):
"""
one stack or block with multihead attention and ff block
:param inp: input tensor
:param scope: tf variable scope
:param past: past tensors
:param config: config object
    :return: processed output and the present (stacked key/value) tensor
"""
with tf.variable_scope(scope):
nx = inp.shape[-1].value
norm = normalise_tensor(inp, 'ln_1')
attn, present = attention(norm, 'attn', nx, past=past, config=config)
out = attn + inp
norm = normalise_tensor(out, 'ln_2')
mlp_out = multilayer_perceptron(norm, 'mlp', nx * 4) # note that hidden dim is 4x
out = out + mlp_out
return out, present
def past_shape(config, seqlen=None):
"""
return a list with shape of `past` tensor
:param config: config object
:return: list with shape value
"""
shape = [config.batch_size, config.num_layers, 2, config.num_heads, seqlen,
config.embedding_dim // config.num_heads]
return shape
def expand_tile(value, size):
"""
expand value to size
    :param value: input object to be tiled
:param size: size to tile the object to
:return: tiled output
"""
value = tf.convert_to_tensor(value, name='value')
ndims = value.shape.ndims
out = tf.expand_dims(value, axis=0)
out = tf.tile(out, [size] + [1, ] * ndims)
return out
def positions_for(tokens, past_length):
"""
    get positions for the input tokens
:param tokens: input tokens
:param past_length: length of past object
:return: output
"""
batch_size = tf.shape(tokens)[0]
nsteps = tf.shape(tokens)[1]
out = expand_tile(past_length + tf.range(nsteps), batch_size)
return out
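# Illustrative example (editorial note): for tokens of shape [2, 4] and
# past_length == 3 this returns [[3, 4, 5, 6], [3, 4, 5, 6]], i.e. absolute
# positions that continue after the cached context.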
def model(config, inp, past=None, scope='model', reuse=False):
"""
Model function which returns one complete model
:param config: ModelConfig file
:param inp: input tensor for generation
:param past: any past tensors
:param scope: scope of the model
:param reuse: to reuse the model
:return: dictionary with two objects
"""
with tf.variable_scope(scope, reuse=reuse):
results = {}
batch_size, seqlen = shapes_list(inp)
wpe = tf.get_variable('wpe', [config.num_context, config.embedding_dim],
initializer=tf.random_normal_initializer(stddev=0.01))
wte = tf.get_variable('wte', [config.vocab_size, config.embedding_dim],
initializer=tf.random_normal_initializer(stddev=0.02))
past_length = 0 if past is None else tf.shape(past)[-2]
h = tf.gather(wte, inp) + tf.gather(wpe, positions_for(inp, past_length))
# Transformer
presents = [] # all the layer outputs
pasts = tf.unstack(past, axis=1) if past is not None else [None, ] * config.num_layers
assert len(pasts) == config.num_layers
for layer, past in enumerate(pasts):
h, present = block(h, 'stack_{}'.format(layer), past=past, config=config)
presents.append(present)
results['present'] = tf.stack(presents, axis=1)
out = normalise_tensor(h, 'ln_f')
# language model loss
h_flat = tf.reshape(out, [batch_size * seqlen, config.embedding_dim])
logits = tf.matmul(h_flat, wte, transpose_b=True)
logits = tf.reshape(logits, [batch_size, seqlen, config.vocab_size], name='logits')
results['logits'] = logits
return results
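# Hedged usage sketch (editorial addition, not from the original file). It
# assumes a config object exposing the attributes referenced above; the
# SimpleNamespace values below are illustrative only.
# from types import SimpleNamespace
# hparams = SimpleNamespace(vocab_size=50257, num_context=1024, embedding_dim=768,
#                           num_heads=12, num_layers=12, batch_size=1)
# tokens = tf.placeholder(tf.int32, [1, None])
# outputs = model(hparams, tokens)  # dict with 'logits' and 'present'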
| 31.333333
| 117
| 0.627364
|
39bde199c543f4b16493ca87f62a882a3ba120b4
| 760
|
py
|
Python
|
algorithms/grokking_algorithms/binary_search.py
|
pyerbiz/re-exploring-python
|
c395d20a520dc8cf28ae5f7c9eaad9cf7f652145
|
[
"MIT"
] | null | null | null |
algorithms/grokking_algorithms/binary_search.py
|
pyerbiz/re-exploring-python
|
c395d20a520dc8cf28ae5f7c9eaad9cf7f652145
|
[
"MIT"
] | 1
|
2021-06-29T04:19:15.000Z
|
2021-06-29T04:19:15.000Z
|
algorithms/grokking_algorithms/binary_search.py
|
pyerbiz/re-exploring-python
|
c395d20a520dc8cf28ae5f7c9eaad9cf7f652145
|
[
"MIT"
] | 1
|
2021-09-06T23:16:30.000Z
|
2021-09-06T23:16:30.000Z
|
""" a module with multiple binary search object definitions"""
class BinarySearchBase:
def __init__(self, array, value) -> None:
self.array = array
self.value = value
def simple_binary_search(self):
"""take a list and a value, search for the value in the list
and return its index if found"""
first = 0
last = len(self.array) - 1
        while first <= last:
mid = (last + first) // 2
fetched_val = self.array[mid]
if fetched_val == self.value:
return mid
elif fetched_val > self.value:
last = mid - 1
else:
first = mid + 1
print(f"didn't find the given value")
return None
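if __name__ == "__main__":
    # Small illustrative check (editorial addition, assumes nothing beyond the
    # class above): 7 lives at index 3 of this sorted list.
    searcher = BinarySearchBase([1, 3, 5, 7, 9, 11], 7)
    print(searcher.simple_binary_search())  # -> 3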
| 26.206897
| 68
| 0.535526
|
9ca6a14995e96c0d0cb161793ec536b1005d2c79
| 2,192
|
py
|
Python
|
__scraping__/fcainfoweb.nic.in/main.py
|
whitmans-max/python-examples
|
881a8f23f0eebc76816a0078e19951893f0daaaa
|
[
"MIT"
] | 140
|
2017-02-21T22:49:04.000Z
|
2022-03-22T17:51:58.000Z
|
__scraping__/fcainfoweb.nic.in/main.py
|
whitmans-max/python-examples
|
881a8f23f0eebc76816a0078e19951893f0daaaa
|
[
"MIT"
] | 5
|
2017-12-02T19:55:00.000Z
|
2021-09-22T23:18:39.000Z
|
__scraping__/fcainfoweb.nic.in/main.py
|
whitmans-max/python-examples
|
881a8f23f0eebc76816a0078e19951893f0daaaa
|
[
"MIT"
] | 79
|
2017-01-25T10:53:33.000Z
|
2022-03-11T16:13:57.000Z
|
#!/usr/bin/env python3
# date: 2020.05.28
#
from selenium import webdriver
from selenium.webdriver.support.ui import Select
import pandas as pd
import time
# --- functions ---
def get_data(start_date, end_date, product):
# select `Variation Report`
driver.find_element_by_id('ctl00_MainContent_Rbl_Rpt_type_1').click()
# select `Daily Variant`
    element_variation = driver.find_element_by_id('ctl00_MainContent_Ddl_Rpt_Option1')
drop_variation = Select(element_variation)
drop_variation.select_by_visible_text('Daily Variation')
# select `product` before `date` because `end_date` opens calendar which blocks `product` list
    element_commodity = driver.find_element_by_id('ctl00_MainContent_Lst_Commodity')
drop_commodity = Select(element_commodity)
drop_commodity.select_by_visible_text(product)
# select `start_date` and `end_date`
driver.find_element_by_id('ctl00_MainContent_Txt_FrmDate').send_keys(start_date)
driver.find_element_by_id('ctl00_MainContent_Txt_ToDate').send_keys(end_date)
# click button `Get Data`
driver.find_element_by_id('ctl00_MainContent_btn_getdata1').click()
    time.sleep(3)  # sometimes it needs to wait for the page to load
    # the second table is the one we want
table = pd.read_html(driver.page_source)[2]
print(len(table))
print(table)
# go back
driver.find_element_by_id('btn_back').click()
    time.sleep(3)  # sometimes it needs to wait for the page to load
return table
# --- main ---
driver = webdriver.Firefox()
driver.get('https://fcainfoweb.nic.in/Reports/Report_Menu_Web.aspx')
start_date = '01/05/2020'
end_date = '27/05/2020'
for number, product in enumerate( ('Rice', 'Wheat', 'Tomato', 'Sugar') ):
table = get_data(start_date, end_date, product)
# for first product create file, for other products append to existing file
if number == 0:
mode = 'w'
else:
mode = 'a'
# standard engine `xlsxwriter` can't append so I had to use `openpyxl`
with pd.ExcelWriter('output.xlsx', engine='openpyxl', mode=mode) as writer:
table.to_excel(writer, sheet_name=product, index=False)
| 31.314286
| 98
| 0.715785
|
9e4246844b1c9761968a8b8fb77417543f79d9d0
| 100
|
py
|
Python
|
app/user_manage/__init__.py
|
sargentfrancesca/flasky
|
0bea79fddb45f1279da7c468d1ae7f8a6a3e8c7f
|
[
"MIT"
] | 3
|
2017-12-12T19:38:07.000Z
|
2022-01-17T09:54:07.000Z
|
app/user_manage/__init__.py
|
sargentfrancesca/flasky
|
0bea79fddb45f1279da7c468d1ae7f8a6a3e8c7f
|
[
"MIT"
] | 117
|
2016-08-08T09:10:13.000Z
|
2017-01-31T15:19:51.000Z
|
app/user_manage/__init__.py
|
sargentfrancesca/flasky
|
0bea79fddb45f1279da7c468d1ae7f8a6a3e8c7f
|
[
"MIT"
] | 1
|
2020-05-01T08:36:42.000Z
|
2020-05-01T08:36:42.000Z
|
from flask import Blueprint
user_manage = Blueprint('admin_manage', __name__)
from . import views
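# Editorial note (illustrative, not taken from this package): the blueprint is
# registered elsewhere in the application factory, typically via
# app.register_blueprint(user_manage, url_prefix='/user_manage').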
| 16.666667
| 49
| 0.79
|
82387e8513eb1db2a2aa57d45bccbd6d944586ec
| 383
|
py
|
Python
|
intersectionarray2.py
|
kundan7kumar/AI_Challenge
|
0e510443f26fffeedc517572715083084827ae1a
|
[
"MIT"
] | null | null | null |
intersectionarray2.py
|
kundan7kumar/AI_Challenge
|
0e510443f26fffeedc517572715083084827ae1a
|
[
"MIT"
] | null | null | null |
intersectionarray2.py
|
kundan7kumar/AI_Challenge
|
0e510443f26fffeedc517572715083084827ae1a
|
[
"MIT"
] | null | null | null |
"""
Given two integer arrays nums1 and nums2, return an array of their intersection. Each element in the result must appear as many times as it shows in both arrays and you may return the result in any order.
"""
def intersectionarray2(nums1,nums2):
from collections import Counter
num1 = Counter(nums1)
num2 = Counter(nums2)
return list((num1 & num2).elements())
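if __name__ == "__main__":
    # Illustrative check (editorial addition): common elements appear as many
    # times as they occur in both inputs; order is not guaranteed.
    print(intersectionarray2([1, 2, 2, 1], [2, 2]))  # -> [2, 2]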
| 31.916667
| 204
| 0.733681
|
9752586e0652c3de21cd26eb50ef6ed94ce76018
| 7,133
|
py
|
Python
|
sdk/python/pulumi_azure_native/netapp/v20200901/get_snapshot_policy.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/netapp/v20200901/get_snapshot_policy.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/netapp/v20200901/get_snapshot_policy.py
|
pulumi-bot/pulumi-azure-native
|
f7b9490b5211544318e455e5cceafe47b628e12c
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetSnapshotPolicyResult',
'AwaitableGetSnapshotPolicyResult',
'get_snapshot_policy',
]
@pulumi.output_type
class GetSnapshotPolicyResult:
"""
Snapshot policy information
"""
def __init__(__self__, daily_schedule=None, enabled=None, hourly_schedule=None, id=None, location=None, monthly_schedule=None, name=None, provisioning_state=None, tags=None, type=None, weekly_schedule=None):
if daily_schedule and not isinstance(daily_schedule, dict):
raise TypeError("Expected argument 'daily_schedule' to be a dict")
pulumi.set(__self__, "daily_schedule", daily_schedule)
if enabled and not isinstance(enabled, bool):
raise TypeError("Expected argument 'enabled' to be a bool")
pulumi.set(__self__, "enabled", enabled)
if hourly_schedule and not isinstance(hourly_schedule, dict):
raise TypeError("Expected argument 'hourly_schedule' to be a dict")
pulumi.set(__self__, "hourly_schedule", hourly_schedule)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if monthly_schedule and not isinstance(monthly_schedule, dict):
raise TypeError("Expected argument 'monthly_schedule' to be a dict")
pulumi.set(__self__, "monthly_schedule", monthly_schedule)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if weekly_schedule and not isinstance(weekly_schedule, dict):
raise TypeError("Expected argument 'weekly_schedule' to be a dict")
pulumi.set(__self__, "weekly_schedule", weekly_schedule)
@property
@pulumi.getter(name="dailySchedule")
def daily_schedule(self) -> Optional['outputs.DailyScheduleResponse']:
"""
Schedule for daily snapshots
"""
return pulumi.get(self, "daily_schedule")
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
The property to decide policy is enabled or not
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="hourlySchedule")
def hourly_schedule(self) -> Optional['outputs.HourlyScheduleResponse']:
"""
Schedule for hourly snapshots
"""
return pulumi.get(self, "hourly_schedule")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="monthlySchedule")
def monthly_schedule(self) -> Optional['outputs.MonthlyScheduleResponse']:
"""
Schedule for monthly snapshots
"""
return pulumi.get(self, "monthly_schedule")
@property
@pulumi.getter
def name(self) -> str:
"""
Snapshot policy name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Azure lifecycle management
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="weeklySchedule")
def weekly_schedule(self) -> Optional['outputs.WeeklyScheduleResponse']:
"""
Schedule for weekly snapshots
"""
return pulumi.get(self, "weekly_schedule")
class AwaitableGetSnapshotPolicyResult(GetSnapshotPolicyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSnapshotPolicyResult(
daily_schedule=self.daily_schedule,
enabled=self.enabled,
hourly_schedule=self.hourly_schedule,
id=self.id,
location=self.location,
monthly_schedule=self.monthly_schedule,
name=self.name,
provisioning_state=self.provisioning_state,
tags=self.tags,
type=self.type,
weekly_schedule=self.weekly_schedule)
def get_snapshot_policy(account_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
snapshot_policy_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSnapshotPolicyResult:
"""
Snapshot policy information
:param str account_name: The name of the NetApp account
:param str resource_group_name: The name of the resource group.
:param str snapshot_policy_name: The name of the snapshot policy target
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['resourceGroupName'] = resource_group_name
__args__['snapshotPolicyName'] = snapshot_policy_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:netapp/v20200901:getSnapshotPolicy', __args__, opts=opts, typ=GetSnapshotPolicyResult).value
return AwaitableGetSnapshotPolicyResult(
daily_schedule=__ret__.daily_schedule,
enabled=__ret__.enabled,
hourly_schedule=__ret__.hourly_schedule,
id=__ret__.id,
location=__ret__.location,
monthly_schedule=__ret__.monthly_schedule,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
tags=__ret__.tags,
type=__ret__.type,
weekly_schedule=__ret__.weekly_schedule)
| 35.665
| 211
| 0.650357
|
8dfcf86ac17e1e72d480a74b8249533c3871b085
| 1,411
|
py
|
Python
|
hackerspace/tools/space_open.py
|
y0av/HackspaceOS
|
f20579f29c6659a8d768a4254809cf7ca94cf039
|
[
"MIT"
] | null | null | null |
hackerspace/tools/space_open.py
|
y0av/HackspaceOS
|
f20579f29c6659a8d768a4254809cf7ca94cf039
|
[
"MIT"
] | null | null | null |
hackerspace/tools/space_open.py
|
y0av/HackspaceOS
|
f20579f29c6659a8d768a4254809cf7ca94cf039
|
[
"MIT"
] | null | null | null |
def getOpenNowStatus(language):
from datetime import datetime
import calendar
import pytz
from getConfig import get_config
from hackerspace.templatetags.translate import landingpage
from hackerspace.models import Event
# if an event is happening - show space open. Else use opening hours defined by config.json
if Event.objects.QUERYSET__now():
translated_status = landingpage('Open now', language)
color_indicator = 'green'
else:
timezone = pytz.timezone(get_config('PHYSICAL_SPACE.TIMEZONE_STRING'))
today_weekday = calendar.day_name[datetime.now(timezone).weekday()]
now_hour = datetime.now(timezone).hour
now_minute = datetime.now(timezone).minute
status = 'Unknown'
for status_change in get_config('PHYSICAL_SPACE.OPENING_HOURS')[today_weekday]:
            if (now_hour, now_minute) >= (status_change[0], status_change[1]):  # compare (hour, minute) as a pair
status = status_change[2]
translated_status = landingpage(status, language)
color_indicator = status_change[3]
else:
break
if language == 'hebrew':
return '<div dir="rtl" align="right">'+translated_status+'</div><div class="status_code_icon '+color_indicator+' rtl"></div>'
else:
return '<div class="status_code_icon '+color_indicator+'"></div><div>'+translated_status+'</div>'
| 45.516129
| 133
| 0.672573
|
903f46dd8655e6f1ebbe768ac4a22b943060c370
| 4,796
|
py
|
Python
|
main.py
|
TaehyeonKim-pyomu/CNN_compression_rank_selection_BayesOpt
|
3a8f1b2c2311a9edbdae1f71ba1f7a5890c614b9
|
[
"MIT"
] | 9
|
2020-05-23T12:06:46.000Z
|
2021-08-10T09:34:56.000Z
|
main.py
|
TaehyeonKim-pyomu/CNN_compression_rank_selection_BayesOpt
|
3a8f1b2c2311a9edbdae1f71ba1f7a5890c614b9
|
[
"MIT"
] | 1
|
2020-11-23T15:51:52.000Z
|
2020-12-11T05:44:54.000Z
|
main.py
|
TaehyeonKim-pyomu/CNN_compression_rank_selection_BayesOpt
|
3a8f1b2c2311a9edbdae1f71ba1f7a5890c614b9
|
[
"MIT"
] | 2
|
2020-06-04T12:56:24.000Z
|
2020-07-26T12:56:49.000Z
|
import torch
import torchvision.models as models
import torch.nn as nn
import tensorly as tl
from tensorly.decomposition import partial_tucker
import GPyOpt
import GPy
from GPyOpt.models.gpmodel import GPModel
from GPyOpt.core.task.space import Design_space
from GPyOpt.acquisitions.EI import AcquisitionEI
import numpy as np
global conv
model = models.vgg16(pretrained=True)
class BayesOpt_rank_selection:
def f(self, x):
x1 = x[:, 0]
x2 = x[:, 1]
ranks = [int(x1), int(x2)]
core, [last, first] = partial_tucker(
conv.weight.data.cpu().numpy(), modes=[0, 1], ranks=ranks, init="svd"
)
recon_error = tl.norm(
conv.weight.data.cpu().numpy() - tl.tucker_to_tensor((core, [last, first])),
2,
) / tl.norm(conv.weight.data.cpu().numpy(), 2)
# recon_error = np.nan_to_num(recon_error)
ori_out = conv.weight.data.shape[0]
ori_in = conv.weight.data.shape[1]
ori_ker = conv.weight.data.shape[2]
ori_ker2 = conv.weight.data.shape[3]
first_out = first.shape[0]
first_in = first.shape[1]
core_out = core.shape[0]
core_in = core.shape[1]
last_out = last.shape[0]
last_in = last.shape[1]
original_computation = ori_out * ori_in * ori_ker * ori_ker2
decomposed_computation = (
(first_out * first_in)
+ (core_in * core_out * ori_ker * ori_ker2)
+ (last_in * last_out)
)
computation_error = decomposed_computation / original_computation
if computation_error > 1.0:
computation_error = 5.0
Error = float(recon_error + computation_error)
print("%d, %d, %f, %f, %f" % (x1, x2, recon_error, computation_error, Error))
return Error
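# Editorial note: the objective above is the unweighted sum of two terms - the
# relative reconstruction error of the partial Tucker decomposition and the
# ratio of decomposed to original multiply-accumulate counts (clipped to 5.0
# when the decomposition would cost more than the original layer).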
def estimate_ranks_BayesOpt():
func = BayesOpt_rank_selection()
axis_0 = conv.weight.data.shape[0]
axis_1 = conv.weight.data.shape[1]
space = [
{"name": "rank_1", "type": "continuous", "domain": (2, axis_0 - 1)},
{"name": "rank_2", "type": "continuous", "domain": (2, axis_1 - 1)},
]
feasible_region = GPyOpt.Design_space(space=space)
initial_design = GPyOpt.experiment_design.initial_design(
"random", feasible_region, 10
)
objective = GPyOpt.core.task.SingleObjective(func.f)
model = GPyOpt.models.GPModel(exact_feval=True, optimize_restarts=10, verbose=False)
acquisition_optimizer = GPyOpt.optimization.AcquisitionOptimizer(feasible_region)
acquisition = GPyOpt.acquisitions.AcquisitionEI(
model, feasible_region, optimizer=acquisition_optimizer
)
evaluator = GPyOpt.core.evaluators.Sequential(acquisition)
bo = GPyOpt.methods.ModularBayesianOptimization(
model, feasible_region, objective, acquisition, evaluator, initial_design
)
max_time = None
tolerance = 10e-3
max_iter = 3
bo.run_optimization(
max_iter=max_iter, max_time=max_time, eps=tolerance, verbosity=True
)
bo.plot_acquisition()
bo.plot_convergence()
rank1 = int(bo.x_opt[0])
rank2 = int(bo.x_opt[1])
ranks = [rank1, rank2]
return ranks
def BayesOpt_tucker_decomposition():
ranks = estimate_ranks_BayesOpt()
print(conv, "BayesOpt estimated ranks", ranks)
core, [last, first] = partial_tucker(
conv.weight.data.cpu().numpy(), modes=[0, 1], tol=10e-5, ranks=ranks, init="svd"
)
first_layer = torch.nn.Conv2d(
in_channels=first.shape[0], out_channels=first.shape[1], kernel_size=1, stride=1
)
core_layer = torch.nn.Conv2d(
in_channels=core.shape[1],
out_channels=core.shape[0],
kernel_size=conv.kernel_size,
stride=conv.stride,
padding=conv.padding,
dilation=conv.dilation,
bias=False,
)
last_layer = torch.nn.Conv2d(
in_channels=last.shape[1], out_channels=last.shape[0], kernel_size=1, stride=1
)
first = torch.from_numpy(first.copy())
last = torch.from_numpy(last.copy())
core = torch.from_numpy(core.copy())
first_layer.weight.data = (
torch.transpose(first, 1, 0).unsqueeze(-1).unsqueeze(-1).data.cuda()
)
last_layer.weight.data = last.unsqueeze(-1).unsqueeze(-1).data.cuda()
core_layer.weight.data = core.data.cuda()
new_layers = [first_layer, core_layer, last_layer]
return nn.Sequential(*new_layers)
for i, key in enumerate(model.features._modules.keys()):
if isinstance(model.features._modules[key], torch.nn.modules.conv.Conv2d):
conv = model.features._modules[key].cuda().eval().cpu()
decomposed = BayesOpt_tucker_decomposition()
model.features._modules[key] = decomposed
torch.save(model, "BayesOpt_decomposed_model")
| 29.243902
| 88
| 0.653253
|
9841837cb5f19c48163ca53a03fff2c8dbdbdcea
| 457
|
py
|
Python
|
Cudoviste/cudo.py
|
rajitbanerjee/kattis
|
3a5dd4c84c07e21f09ef45ebd9c1bad2a0adc6ad
|
[
"CC0-1.0"
] | 2
|
2021-04-13T12:56:30.000Z
|
2022-03-21T16:46:58.000Z
|
Cudoviste/cudo.py
|
rajitbanerjee/kattis
|
3a5dd4c84c07e21f09ef45ebd9c1bad2a0adc6ad
|
[
"CC0-1.0"
] | null | null | null |
Cudoviste/cudo.py
|
rajitbanerjee/kattis
|
3a5dd4c84c07e21f09ef45ebd9c1bad2a0adc6ad
|
[
"CC0-1.0"
] | 1
|
2021-11-14T14:06:46.000Z
|
2021-11-14T14:06:46.000Z
|
"""https://open.kattis.com/problems/cudoviste"""
R, C = map(int, input().split())
squashed = [0] * 5
mat = []
for _ in range(R):
mat.append(list(input()))
for i in range(R - 1):
for j in range(C - 1):
space = [mat[i][j], mat[i][j + 1], mat[i + 1][j], mat[i + 1][j + 1]]
buildings = space.count('#')
cars = space.count('X')
if buildings == 0:
squashed[cars] += 1
for cars in squashed:
print(cars)
| 21.761905
| 76
| 0.516411
|
6263c2a0138eb30156950f144166360d8e4479c8
| 2,961
|
py
|
Python
|
venv/lib/python3.8/site-packages/ansible_collections/cisco/ise/plugins/action/network_access_conditions_for_authorization_rule_info.py
|
saeedya/docker-ansible
|
6fb0cfc6bc4a5925b21380952a5a4502ec02119a
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/ansible_collections/cisco/ise/plugins/action/network_access_conditions_for_authorization_rule_info.py
|
saeedya/docker-ansible
|
6fb0cfc6bc4a5925b21380952a5a4502ec02119a
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/ansible_collections/cisco/ise/plugins/action/network_access_conditions_for_authorization_rule_info.py
|
saeedya/docker-ansible
|
6fb0cfc6bc4a5925b21380952a5a4502ec02119a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2021, Cisco Systems
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
try:
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
except ImportError:
ANSIBLE_UTILS_IS_INSTALLED = False
else:
ANSIBLE_UTILS_IS_INSTALLED = True
from ansible.errors import AnsibleActionFail
from ansible_collections.cisco.ise.plugins.plugin_utils.ise import (
ISESDK,
ise_argument_spec,
)
# Get common arguments specification
argument_spec = ise_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
))
required_if = []
required_one_of = []
mutually_exclusive = []
required_together = []
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
if not ANSIBLE_UTILS_IS_INSTALLED:
raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
super(ActionModule, self).__init__(*args, **kwargs)
self._supports_async = False
self._supports_check_mode = True
self._result = None
# Checks the supplied parameters against the argument spec for this module
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=dict(argument_spec=argument_spec),
schema_format="argspec",
schema_conditionals=dict(
required_if=required_if,
required_one_of=required_one_of,
mutually_exclusive=mutually_exclusive,
required_together=required_together,
),
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
raise AnsibleActionFail(errors)
def get_object(self, params):
new_object = dict(
)
return new_object
def run(self, tmp=None, task_vars=None):
self._task.diff = False
self._result = super(ActionModule, self).run(tmp, task_vars)
self._result["changed"] = False
self._check_argspec()
self._result.update(dict(ise_response={}))
ise = ISESDK(params=self._task.args)
id = self._task.args.get("id")
name = self._task.args.get("name")
if not name and not id:
response = ise.exec(
family="network_access_conditions",
function='get_network_access_conditions_for_authorization_rules',
params=self.get_object(self._task.args),
).response
self._result.update(dict(ise_response=response))
self._result.update(ise.exit_json())
return self._result
| 33.269663
| 128
| 0.667004
|
0df0c48870c45770fb515781cca46df18dda87a5
| 2,201
|
py
|
Python
|
unit_tests/lenet.py
|
evtHsa/sdcTrafficSignClassifier
|
73bfea8964dc6be5ee7b70105511f255a13f5cdb
|
[
"MIT"
] | null | null | null |
unit_tests/lenet.py
|
evtHsa/sdcTrafficSignClassifier
|
73bfea8964dc6be5ee7b70105511f255a13f5cdb
|
[
"MIT"
] | null | null | null |
unit_tests/lenet.py
|
evtHsa/sdcTrafficSignClassifier
|
73bfea8964dc6be5ee7b70105511f255a13f5cdb
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from tensorflow.contrib.layers import flatten  # needed by the flatten() call below
# Assumed defaults (not defined in the original snippet): a zero mean and a
# small stddev are the values typically used with this initializer here.
mu = 0
sigma = 0.1
def LeNet(x):
    # Arguments used for tf.truncated_normal, which randomly initializes the
    # weights and biases for each layer
    # conv strides: (batch, height, width, depth)
    # 2DO: Layer 1: Convolutional. Input = 32x32x3. Output = 28x28x6.
conv1_W = tf.Variable(tf.truncated_normal(shape=(5, 5, 3, 6), mean = mu,
stddev = sigma))
conv1_b = tf.Variable(tf.zeros(6))
conv1 = tf.nn.conv2d(x, conv1_W, strides=[1, 1, 1, 1],
padding='VALID') + conv1_b
# 2DO: Activation.
conv1 = tf.nn.relu(conv1)
# 2DO: Pooling. Input = 28x28x6. Output = 14x14x6
conv1 = tf.nn.max_pool(conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='VALID')
# 2DO: Layer 2: Convolutional. Output = 10x10x16.
conv2_W = tf.Variable(tf.truncated_normal(shape=(5, 5, 6, 16), mean = mu,
stddev = sigma))
conv2_b = tf.Variable(tf.zeros(16))
conv2 = tf.nn.conv2d(conv1, conv2_W, strides=[1, 1, 1, 1],
padding='VALID') + conv2_b
# 2DO: Activation.
conv2 = tf.nn.relu(conv2)
# 2DO: Pooling. Input = 10x10x16. Output = 5x5x16.
conv2 = tf.nn.max_pool(conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='VALID')
# 2DO: Flatten. Input = 5x5x16. Output = 400.
# https://www.tensorflow.org/api_docs/python/tf/contrib/layers/flatten
flat = flatten(conv2)
# TODO: Layer 3: Fully Connected. Input = 400. Output = 120.
# https://github.com/aymericdamien/TensorFlow-Examples/blob/master/\
# examples/3_NeuralNetworks/convolutional_network.py
# https://www.tensorflow.org/api_docs/python/tf/layers/dense
fc1 = tf.layers.dense(flat, 120)
# TODO: Activation.
fc1 = tf.nn.relu(fc1)
# TODO: Layer 4: Fully Connected. Input = 120. Output = 84.
fc2 = tf.layers.dense(fc1, 84)
# TODO: Activation.
fc2 = tf.nn.relu(fc2)
# TODO: Layer 5: Fully Connected. Input = 84. Output = 10.
logits = tf.layers.dense(fc2, DD.n_classes)
return logits
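# Hedged usage sketch (editorial addition): `DD` is expected to come from the
# surrounding project and expose `n_classes`; the placeholder shape matches the
# 3-channel 32x32 input implied by the first convolution's weights.
# x = tf.placeholder(tf.float32, (None, 32, 32, 3))
# logits = LeNet(x)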
| 36.683333
| 80
| 0.580645
|
f5ac5e093e67724ceed9bece2fb8ea15562c0673
| 1,572
|
py
|
Python
|
tests/test_datasets/test_download.py
|
souravsingh/yellowbrick
|
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
|
[
"Apache-2.0"
] | 2
|
2018-05-24T01:32:03.000Z
|
2018-05-27T19:56:02.000Z
|
tests/test_datasets/test_download.py
|
souravsingh/yellowbrick
|
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
|
[
"Apache-2.0"
] | null | null | null |
tests/test_datasets/test_download.py
|
souravsingh/yellowbrick
|
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
|
[
"Apache-2.0"
] | null | null | null |
# tests.test_datasets.test_download
# Tests the download from S3 to ensure data is accessible.
#
# Author: Benjamin Bengfort <benjamin@bengfort.com>
# Created: Tue Jan 01 15:06:05 2019 -0500
#
# For license information, see LICENSE.txt
#
# ID: test_download.py [] benjamin@bengfort.com $
"""
Tests the download from S3 to ensure data is accessible.
"""
##########################################################################
## Imports
##########################################################################
import pytest
from yellowbrick.datasets.loaders import *
from yellowbrick.datasets.loaders import DATASETS
from yellowbrick.datasets.path import dataset_exists, dataset_archive
@pytest.mark.parametrize("loader", [
load_bikeshare, load_concrete, load_credit, load_energy, load_game,
load_mushroom, load_occupancy, load_spam, load_walking, load_hobbies,
], ids=lambda l: l.__name__)
def test_loader_download(tmpdir, loader):
"""
Test download of dataset when it does not exist (requires Internet connection!)
"""
name = loader.__name__[len("load_"):]
data_home = str(tmpdir.mkdir("datasets"))
# The dataset should not exist
assert not dataset_exists(name, data_home=data_home)
assert not dataset_archive(name, DATASETS[name]["signature"], data_home=data_home)
# Load the dataset
loader(data_home=data_home)
# The dataset should have been downloaded
assert dataset_exists(name, data_home=data_home)
assert dataset_archive(name, DATASETS[name]["signature"], data_home=data_home)
| 34.173913
| 86
| 0.673664
|
e5223dad3ffcd941cfcb61215af23420d2750571
| 494
|
py
|
Python
|
Desafio018.py
|
GabrielSanchesRosa/Python
|
3a129e27e076b2a91af03d68ede50b9c45c50217
|
[
"MIT"
] | null | null | null |
Desafio018.py
|
GabrielSanchesRosa/Python
|
3a129e27e076b2a91af03d68ede50b9c45c50217
|
[
"MIT"
] | null | null | null |
Desafio018.py
|
GabrielSanchesRosa/Python
|
3a129e27e076b2a91af03d68ede50b9c45c50217
|
[
"MIT"
] | null | null | null |
# Write a program that reads any angle and shows on screen the sine, cosine and tangent of that angle.
from math import radians, sin, cos, tan
angulo = float(input('Enter the angle you want: '))
seno = sin(radians(angulo))
print(f'The angle {angulo} has a SINE of {seno:.2f}.')
cosseno = cos(radians(angulo))
print(f'The angle {angulo} has a COSINE of {cosseno:.2f}.')
tangente = tan(radians(angulo))
print(f'The angle {angulo} has a TANGENT of {tangente:.2f}.')
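# Worked example (editorial addition): for an angle of 30 degrees the program
# prints a sine of 0.50, a cosine of 0.87 and a tangent of 0.58.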
| 32.933333
| 113
| 0.714575
|
c9aec3c930ea60ea55cdd803e3a8ae738d073870
| 663
|
py
|
Python
|
research/delf/delf/python/training/create_oxford5k_list.py
|
fwtan/models
|
d0e9833d33a7a699c80a60fd1d342eb2e923d755
|
[
"Apache-2.0"
] | null | null | null |
research/delf/delf/python/training/create_oxford5k_list.py
|
fwtan/models
|
d0e9833d33a7a699c80a60fd1d342eb2e923d755
|
[
"Apache-2.0"
] | null | null | null |
research/delf/delf/python/training/create_oxford5k_list.py
|
fwtan/models
|
d0e9833d33a7a699c80a60fd1d342eb2e923d755
|
[
"Apache-2.0"
] | null | null | null |
from glob import glob
import os.path as osp
import pickle
def pickle_load(path):
with open(path, 'rb') as fid:
data_ = pickle.load(fid)
return data_
gnd = pickle_load('oxford5k/gnd_roxford5k.pkl')
query_names = gnd['qimlist']
index_names = gnd['imlist']
query_paths = ['oxford5k/oxford5k_images/%s.jpg'%x for x in query_names]
index_paths = ['oxford5k/oxford5k_images/%s.jpg'%x for x in index_names]
# jpgs = sorted(glob('oxford5k/oxford5k_images/*.jpg', recursive=True))
with open('oxford5k/query_images.txt', 'w') as f:
f.write('\n'.join(query_paths))
with open('oxford5k/index_images.txt', 'w') as f:
f.write('\n'.join(index_paths))
| 33.15
| 72
| 0.710407
|
951555cdfd92834a5eea0c15334da170cba5eee8
| 7,163
|
py
|
Python
|
core/python/kungfu/data/sqlite/models.py
|
yunnant/kungfu
|
03dba19c922a5950068bd2d223488b8543ad8dd1
|
[
"Apache-2.0"
] | null | null | null |
core/python/kungfu/data/sqlite/models.py
|
yunnant/kungfu
|
03dba19c922a5950068bd2d223488b8543ad8dd1
|
[
"Apache-2.0"
] | 1
|
2019-08-23T01:52:33.000Z
|
2019-08-23T01:52:33.000Z
|
core/python/kungfu/data/sqlite/models.py
|
yunnant/kungfu
|
03dba19c922a5950068bd2d223488b8543ad8dd1
|
[
"Apache-2.0"
] | null | null | null |
from . import *
from sqlalchemy import Column, ForeignKey, Integer, String, Date, Float, Boolean, PrimaryKeyConstraint
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
import time
Base = declarative_base()
class Account(Base):
__tablename__ = 'account_config'
account_id = Column(String, nullable=False, primary_key=True)
source_name = Column(String)
receive_md = Column(Boolean)
config = Column(Json, nullable=False)
class Holiday(Base):
__tablename__ = "holidays"
__table_args__ = (PrimaryKeyConstraint('region', 'holiday'),)
region = Column(String)
holiday = Column(Date)
class FutureInstrument(Base):
__tablename__ = 'future_instrument'
__table_args__ = (PrimaryKeyConstraint('instrument_id', 'exchange_id'),)
instrument_id = Column(String)
exchange_id = Column(String)
instrument_type = Column(Integer)
product_id = Column(String)
contract_multiplier = Column(Integer)
price_tick = Column(Float)
open_date = Column(String)
create_date = Column(String)
expire_date = Column(String)
delivery_year = Column(Integer)
delivery_month = Column(Integer)
is_trading = Column(Boolean)
long_margin_ratio = Column(Float)
short_margin_ratio = Column(Float)
class Commission(Base):
__tablename__ = 'commission'
__table_args__ = (PrimaryKeyConstraint('instrument_id', 'exchange_id', 'instrument_type'),)
instrument_id = Column(String)
exchange_id = Column(String)
instrument_type = Column(Integer)
account_id = Column(String)
broker_id = Column(String)
mode = Column(Integer)
open_ratio = Column(Float)
close_ratio = Column(Float)
close_today_ratio = Column(Float)
min_commission = Column(Float)
class Order(Base):
__tablename__ = 'orders'
order_id = Column(String, primary_key = True)
insert_time = Column(Integer)
update_time = Column(Integer)
trading_day = Column(String)
instrument_id = Column(String)
exchange_id = Column(String)
account_id = Column(String)
client_id = Column(String)
instrument_type = Column(Integer)
limit_price = Column(Float)
frozen_price = Column(Float)
volume = Column(Integer)
volume_traded = Column(Integer)
volume_left = Column(Integer)
tax = Column(Float)
commission = Column(Float)
status = Column(Integer)
error_id = Column(Integer)
error_msg = Column(String)
side = Column(Integer)
offset = Column(Integer)
price_type = Column(Integer)
volume_condition = Column(Integer)
time_condition = Column(Integer)
parent_id = Column(String)
class Trade(Base):
__tablename__ = 'trades'
trade_id = Column(String, primary_key = True)
order_id = Column(String, nullable = False)
parent_order_id = Column(String)
trade_time = Column(Integer)
trading_day = Column(String)
instrument_id = Column(String)
exchange_id = Column(String)
account_id = Column(String)
client_id = Column(String)
instrument_type = Column(Integer)
side = Column(Integer)
offset = Column(Integer)
price = Column(Float)
volume = Column(Integer)
tax = Column(Float)
commission = Column(Float)
class AssetMixin:
trading_day = Column(String)
update_time = Column(Integer)
ledger_category = Column(Integer)
account_id = Column(String)
source_id = Column(String)
client_id = Column(String)
initial_equity = Column(Float)
static_equity = Column(Float)
dynamic_equity = Column(Float)
realized_pnl = Column(Float)
unrealized_pnl = Column(Float)
avail = Column(Float)
market_value = Column(Float)
margin = Column(Float)
accumulated_fee = Column(Float)
intraday_fee = Column(Float)
frozen_cash = Column(Float)
frozen_margin = Column(Float)
frozen_fee = Column(Float)
position_pnl = Column(Float)
close_pnl = Column(Float)
def __init__(self, **kwargs):
if not "update_time" in kwargs:
kwargs["update_time"] = int(round(time.time() * 1000000000))
for attr in self.__mapper__.columns.keys():
if attr in kwargs:
setattr(self, attr, kwargs[attr])
class Asset(AssetMixin, Base):
__tablename__ = "asset"
__table_args__ = (PrimaryKeyConstraint('account_id','client_id', 'ledger_category'),)
class AssetSnapshot(AssetMixin, Base):
__tablename__ = "asset_snapshot"
__table_args__ = (PrimaryKeyConstraint('account_id', 'client_id', 'ledger_category', 'update_time'),)
class Position(Base):
__tablename__ = "position"
__table_args__ = (PrimaryKeyConstraint('account_id', 'client_id', 'ledger_category', 'instrument_id', 'exchange_id', 'direction'),)
update_time = Column(Integer)
trading_day = Column(String)
instrument_id = Column(String)
instrument_type = Column(Integer)
exchange_id = Column(String)
account_id = Column(String)
source_id = Column(String)
client_id = Column(String)
direction = Column(Integer)
volume = Column(Integer)
yesterday_volume = Column(Integer)
frozen_total = Column(Integer)
    frozen_yesterday = Column(Integer)
last_price = Column(Float)
avg_open_price = Column(Float)
position_cost_price = Column(Float)
close_price = Column(Float)
pre_close_price = Column(Float)
settlement_price = Column(Float)
pre_settlement_price = Column(Float)
margin = Column(Float)
position_pnl = Column(Float)
close_pnl = Column(Float)
realized_pnl = Column(Float)
unrealized_pnl = Column(Float)
margin_ratio = Column(Float)
contract_multiplier = Column(Integer)
ledger_category = Column(Integer)
def __init__(self, **kwargs):
if not "update_time" in kwargs:
kwargs["update_time"] = int(round(time.time() * 1000000000))
for attr in self.__mapper__.columns.keys():
if attr in kwargs:
setattr(self, attr, kwargs[attr])
class PositionDetail(Base):
__tablename__ = "position_detail"
__table_args__ = (PrimaryKeyConstraint('ledger_category', 'trade_id'),)
update_time = Column(Integer)
trading_day = Column(String)
instrument_id = Column(String)
instrument_type = Column(String)
exchange_id = Column(String)
account_id = Column(String)
source_id = Column(String)
client_id = Column(String)
direction = Column(Integer)
volume = Column(Integer)
frozen_volume = Column(Integer)
last_price = Column(Float)
open_price = Column(Float)
settlement_price = Column(Float)
pre_settlement_price = Column(Float)
open_date = Column(String)
trade_id = Column(String)
trade_time = Column(Integer)
margin_ratio = Column(Float)
contract_multiplier = Column(Integer)
ledger_category = Column(Integer)
def __init__(self, **kwargs):
for attr in self.__mapper__.columns.keys():
if attr in kwargs:
value = kwargs[attr]
if attr == "trade_id":
value = str(value)
setattr(self, attr, value)
| 29.356557
| 135
| 0.688818
|
cc756809aedcb181b2e8372a280b8d19169908e1
| 1,181
|
py
|
Python
|
src/pydi/pydi.py
|
chadwell1028/pydi
|
361cdd517ade0177c658747b5a6c62bbe6488295
|
[
"MIT"
] | null | null | null |
src/pydi/pydi.py
|
chadwell1028/pydi
|
361cdd517ade0177c658747b5a6c62bbe6488295
|
[
"MIT"
] | null | null | null |
src/pydi/pydi.py
|
chadwell1028/pydi
|
361cdd517ade0177c658747b5a6c62bbe6488295
|
[
"MIT"
] | null | null | null |
import glob
import importlib
import inspect
import logging
import os
import re
import sys
from os.path import isfile, basename
class Pydi:
def __init__(self, directory=None):
self._logger = logging.getLogger(__name__)
self._directory = directory or os.getcwd()
self._class_type_map = self._detect_classes()
print(self._class_type_map)
def _detect_classes(self):
self._logger.info('Detecting project\'s classes')
current_dir = self._directory
modules = glob.glob(current_dir + "/**/*.py", recursive=True)
top_folder = basename(os.getcwd())
class_type_names = [f.split(top_folder + '\\')[-1].replace('\\', '.').replace('.py', '') or f.split(top_folder + '/')[-1].replace('/', '.').replace('.py', '') for f in modules if isfile(f) and not f.endswith('__init__.py')]
class_types = set((inspect.getmembers(sys.modules[importlib.import_module(name).__name__], inspect.isclass)[0][1]) for name in class_type_names[1:])
return {type.__name__.lower(): type for type in class_types}
def build_dependency(self, dependency_name):
return self._class_type_map[dependency_name]()
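# Hedged usage sketch (editorial addition; the module and class names are
# illustrative only): given a file such as services/my_service.py defining a
# class MyService under the working directory, the container resolves it by
# its lower-cased class name.
# container = Pydi()
# service = container.build_dependency("myservice")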
| 35.787879
| 231
| 0.676545
|
76226f08c6fe33e5c4ed1c95b877e0dccf132ec7
| 5,059
|
py
|
Python
|
beetsplug/mbsync.py
|
Profpatsch/beets
|
eb5c37ecc0f5c7ab7ac902d8d88219c068b9db1f
|
[
"MIT"
] | 1
|
2015-11-06T03:32:25.000Z
|
2015-11-06T03:32:25.000Z
|
beetsplug/mbsync.py
|
wudangt/beets
|
b7da5df5eef6dd6be2a8888035f2dd0077e6ebaa
|
[
"MIT"
] | null | null | null |
beetsplug/mbsync.py
|
wudangt/beets
|
b7da5df5eef6dd6be2a8888035f2dd0077e6ebaa
|
[
"MIT"
] | null | null | null |
# This file is part of beets.
# Copyright 2014, Jakob Schnitzer.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Update library's tags using MusicBrainz.
"""
import logging
from beets.plugins import BeetsPlugin
from beets import autotag, library, ui, util
from beets.autotag import hooks
from beets import config
log = logging.getLogger('beets')
def mbsync_singletons(lib, query, move, pretend, write):
"""Retrieve and apply info from the autotagger for items matched by
query.
"""
for item in lib.items(query + ['singleton:true']):
if not item.mb_trackid:
log.info(u'Skipping singleton {0}: has no mb_trackid'
.format(item.title))
continue
# Get the MusicBrainz recording info.
track_info = hooks.track_for_mbid(item.mb_trackid)
if not track_info:
log.info(u'Recording ID not found: {0}'.format(item.mb_trackid))
continue
# Apply.
with lib.transaction():
autotag.apply_item_metadata(item, track_info)
apply_item_changes(lib, item, move, pretend, write)
def mbsync_albums(lib, query, move, pretend, write):
"""Retrieve and apply info from the autotagger for albums matched by
query and their items.
"""
# Process matching albums.
for a in lib.albums(query):
if not a.mb_albumid:
log.info(u'Skipping album {0}: has no mb_albumid'.format(a.id))
continue
items = list(a.items())
# Get the MusicBrainz album information.
album_info = hooks.album_for_mbid(a.mb_albumid)
if not album_info:
log.info(u'Release ID not found: {0}'.format(a.mb_albumid))
continue
# Construct a track mapping according to MBIDs. This should work
# for albums that have missing or extra tracks.
mapping = {}
for item in items:
for track_info in album_info.tracks:
if item.mb_trackid == track_info.track_id:
mapping[item] = track_info
break
# Apply.
with lib.transaction():
autotag.apply_metadata(album_info, mapping)
changed = False
for item in items:
item_changed = ui.show_model_changes(item)
changed |= item_changed
if item_changed:
apply_item_changes(lib, item, move, pretend, write)
if not changed:
# No change to any item.
continue
if not pretend:
# Update album structure to reflect an item in it.
for key in library.Album.item_keys:
a[key] = items[0][key]
a.store()
# Move album art (and any inconsistent items).
if move and lib.directory in util.ancestry(items[0].path):
log.debug(u'moving album {0}'.format(a.id))
a.move()
def apply_item_changes(lib, item, move, pretend, write):
"""Store, move and write the item according to the arguments.
"""
if not pretend:
# Move the item if it's in the library.
if move and lib.directory in util.ancestry(item.path):
item.move(with_album=False)
if write:
item.try_write()
item.store()
def mbsync_func(lib, opts, args):
"""Command handler for the mbsync function.
"""
move = opts.move
pretend = opts.pretend
write = opts.write
query = ui.decargs(args)
mbsync_singletons(lib, query, move, pretend, write)
mbsync_albums(lib, query, move, pretend, write)
class MBSyncPlugin(BeetsPlugin):
def __init__(self):
super(MBSyncPlugin, self).__init__()
def commands(self):
cmd = ui.Subcommand('mbsync',
help='update metadata from musicbrainz')
cmd.parser.add_option('-p', '--pretend', action='store_true',
help='show all changes but do nothing')
cmd.parser.add_option('-M', '--nomove', action='store_false',
default=True, dest='move',
help="don't move files in library")
cmd.parser.add_option('-W', '--nowrite', action='store_false',
default=config['import']['write'], dest='write',
help="don't write updated metadata to files")
cmd.func = mbsync_func
return [cmd]
| 35.131944
| 78
| 0.603084
|
b0d35f275868ba88d689bd48482aab22bdfc61d0
| 792
|
py
|
Python
|
tests/parser/syntax/test_logging.py
|
ryan-rozario/vyper
|
9d235e6e7e85ee0dbfaf54a6efd5fb6334c2d00f
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/syntax/test_logging.py
|
ryan-rozario/vyper
|
9d235e6e7e85ee0dbfaf54a6efd5fb6334c2d00f
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/syntax/test_logging.py
|
ryan-rozario/vyper
|
9d235e6e7e85ee0dbfaf54a6efd5fb6334c2d00f
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from pytest import (
raises,
)
from vyper import (
compiler,
)
from vyper.exceptions import (
InvalidLiteral,
TypeMismatch,
)
fail_list = [
"""
Bar: event({_value: int128[4]})
x: decimal[4]
@public
def foo():
log.Bar(self.x)
""",
"""
Bar: event({_value: int128[4]})
@public
def foo():
x: decimal[4] = [0.0, 0.0, 0.0, 0.0]
log.Bar(x)
""",
("""
Test: event({ n: uint256 })
@public
def test():
log.Test(-7)
""", InvalidLiteral),
]
@pytest.mark.parametrize('bad_code', fail_list)
def test_logging_fail(bad_code):
if isinstance(bad_code, tuple):
with raises(bad_code[1]):
compiler.compile_code(bad_code[0])
else:
with raises(TypeMismatch):
compiler.compile_code(bad_code)
| 15.84
| 47
| 0.59596
|
f37c126499bc5b1d855a5f53b265c99a78a2ba32
| 7,272
|
py
|
Python
|
pyjfuzz/core/pjf_factory.py
|
ParrotSec/PyJFuzz
|
b206ef75ef8f941c660ac1f70e533bf79ed3fd0c
|
[
"MIT"
] | 1
|
2017-02-10T15:07:20.000Z
|
2017-02-10T15:07:20.000Z
|
pyjfuzz/core/pjf_factory.py
|
ParrotSec/PyJFuzz
|
b206ef75ef8f941c660ac1f70e533bf79ed3fd0c
|
[
"MIT"
] | null | null | null |
pyjfuzz/core/pjf_factory.py
|
ParrotSec/PyJFuzz
|
b206ef75ef8f941c660ac1f70e533bf79ed3fd0c
|
[
"MIT"
] | 2
|
2018-09-15T15:12:38.000Z
|
2020-07-15T01:24:48.000Z
|
"""
The MIT License (MIT)
Copyright (c) 2016 Daniele Linguaglossa <d.linguaglossa@mseclab.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from errors import PJFInvalidType, PJFMissingArgument, PJFBaseException
from pjf_mutators import PJFMutators
from pjf_mutation import PJFMutation
from pjf_encoder import PJFEncoder
from pjf_logger import PJFLogger
import time
import json
import urllib
class PJFFactory(object):
def __init__(self, configuration):
"""
Class that represent a JSON object
"""
self.logger = self.init_logger()
if ["json", "json_file", "strong_fuzz", "parameters", "exclude_parameters", "url_encode", "indent",
"utf8"] not in configuration:
raise PJFMissingArgument("Some arguments are missing from PJFFactory object")
self.config = configuration
self.mutator = PJFMutation(self.config)
other = self.config.json
if not self.config.strong_fuzz:
if type(other) == dict:
self.json = other
elif type(other) == list:
self.json = {"array": other}
else:
raise PJFInvalidType(other, dict)
else:
if self.config.json_file:
self.json = other
else:
self.json = json.dumps(other)
self.logger.debug("[{0}] - PJFFactory successfully initialized".format(time.strftime("%H:%M:%S")))
def __add__(self, other):
"""
Add keys to dictionary merging with another dictionary object
"""
self.json.update(other)
return self
def __sub__(self, other):
"""
Removes keys from self dictionary based on provided list
"""
if type(other) == list:
for element in other:
if element in self.json:
del self.json[element]
return self
else:
raise PJFInvalidType(other, list)
def __eq__(self, other):
"""
        Check if two objects are equal
"""
return self.json == other
def __getitem__(self, item):
"""
Extract an item from the JSON object
"""
if type(item) == str:
return self.json[item]
else:
return self.json
def __setitem__(self, key, value):
"""
Set a JSON attribute
"""
self.json[key] = value
def __contains__(self, items):
"""
Check if JSON object contains a key
"""
try:
if type(items) != list:
raise PJFInvalidType(items, list)
ret = 0
for item in items:
for key in self.json:
if isinstance(self.json[key], PJFFactory):
ret += item in self.json[key]
elif item == key:
ret += 1
return len(items) == ret
except Exception as e:
raise PJFBaseException(e.message)
def __repr__(self):
"""
Represent the JSON object
"""
return str(self.json)
def fuzz_elements(self, element):
"""
Fuzz all elements inside the object
"""
try:
if type(element) == dict:
tmp_element = {}
for key in element:
if self.config.parameters:
if self.config.exclude_parameters:
fuzz = key not in self.config.parameters
else:
fuzz = key in self.config.parameters
else:
fuzz = True
if fuzz:
if type(element[key]) == dict:
tmp_element.update({key: self.fuzz_elements(element[key])})
elif type(element[key]) == list:
tmp_element.update({key: self.fuzz_elements(element[key])})
else:
tmp_element.update({key: self.mutator.fuzz(element[key])})
else:
tmp_element.update({key: element[key]})
element = tmp_element
del tmp_element
elif type(element) == list:
arr = []
for key in element:
if type(key) == dict:
arr.append(self.fuzz_elements(key))
elif type(key) == list:
arr.append(self.fuzz_elements(key))
else:
arr.append(self.mutator.fuzz(key))
element = arr
del arr
except Exception as e:
raise PJFBaseException(e.message)
return element
def init_logger(self):
"""
Init the default logger
"""
return PJFLogger.init_logger()
@property
def fuzzed(self):
"""
Get a printable fuzzed object
"""
try:
if self.config.strong_fuzz:
fuzzer = PJFMutators(self.config)
if self.config.url_encode:
return urllib.quote(fuzzer.fuzz(json.dumps(self.config.json)))
else:
if type(self.config.json) in [list, dict]:
return fuzzer.fuzz(json.dumps(self.config.json))
else:
return fuzzer.fuzz(self.config.json)
else:
if self.config.url_encode:
return urllib.quote(self.get_fuzzed(self.config.indent, self.config.utf8))
else:
return self.get_fuzzed(self.config.indent, self.config.utf8)
except Exception as e:
raise PJFBaseException(e.message)
@PJFEncoder.json_encode
def get_fuzzed(self, indent=False, utf8=False):
"""
Return the fuzzed object
"""
try:
if "array" in self.json:
return self.fuzz_elements(dict(self.json))["array"]
else:
return self.fuzz_elements(dict(self.json))
except Exception as e:
raise PJFBaseException(e.message)
| 35.130435
| 107
| 0.550468
|
faa6eb0d250385d4bea018998948740d48d6ebbc
| 5,693
|
py
|
Python
|
samcli/local/docker/manager.py
|
theowenyoung/aws-sam-cli
|
27dbe8f426531365c26788382dbf0a1b0844192b
|
[
"Apache-2.0"
] | null | null | null |
samcli/local/docker/manager.py
|
theowenyoung/aws-sam-cli
|
27dbe8f426531365c26788382dbf0a1b0844192b
|
[
"Apache-2.0"
] | null | null | null |
samcli/local/docker/manager.py
|
theowenyoung/aws-sam-cli
|
27dbe8f426531365c26788382dbf0a1b0844192b
|
[
"Apache-2.0"
] | null | null | null |
"""
Provides classes that interface with Docker to create, execute and manage containers.
"""
import logging
import sys
import docker
import requests
from samcli.lib.utils.stream_writer import StreamWriter
LOG = logging.getLogger(__name__)
class ContainerManager:
"""
This class knows how to interface with Docker to create, execute and manage the container's life cycle. It can
run multiple containers in parallel, and also comes with the ability to reuse existing containers in order to
serve requests faster. It is also thread-safe.
"""
def __init__(self, docker_network_id=None, docker_client=None, skip_pull_image=False):
"""
Instantiate the container manager
:param docker_network_id: Optional Docker network to run this container in.
:param docker_client: Optional docker client object
:param bool skip_pull_image: Should we pull new Docker container image?
"""
self.skip_pull_image = skip_pull_image
self.docker_network_id = docker_network_id
self.docker_client = docker_client or docker.from_env()
@property
def is_docker_reachable(self):
"""
Checks if Docker daemon is running. This is required for us to invoke the function locally
Returns
-------
bool
True, if Docker is available, False otherwise
"""
return True
# try:
# self.docker_client.ping()
# return True
        # # When Docker is not installed, a requests.exceptions.ConnectionError is thrown.
# except (docker.errors.APIError, requests.exceptions.ConnectionError):
# LOG.debug("Docker is not reachable", exc_info=True)
# return False
def run(self, container, input_data=None, warm=False):
"""
Create and run a Docker container based on the given configuration.
:param samcli.local.docker.container.Container container: Container to create and run
:param input_data: Optional. Input data sent to the container through container's stdin.
:param bool warm: Indicates if an existing container can be reused. Defaults False ie. a new container will
be created for every request.
:raises DockerImagePullFailedException: If the Docker image was not available in the server
"""
# if warm:
# raise ValueError("The facility to invoke warm container does not exist")
# image_name = container.image
# is_image_local = self.has_image(image_name)
# # Skip Pulling a new image if: a) Image name is samcli/lambda OR b) Image is available AND
# # c) We are asked to skip pulling the image
# if (is_image_local and self.skip_pull_image) or image_name.startswith("samcli/lambda"):
# LOG.info("Requested to skip pulling images ...\n")
# else:
# try:
# self.pull_image(image_name)
# except DockerImagePullFailedException:
# if not is_image_local:
# raise DockerImagePullFailedException(
# "Could not find {} image locally and failed to pull it from docker.".format(image_name)
# )
# LOG.info("Failed to download a new %s image. Invoking with the already downloaded image.", image_name)
# if not container.is_created():
# # Create the container first before running.
# # Create the container in appropriate Docker network
# container.network_id = self.docker_network_id
# container.create()
# container.start(input_data=input_data)
def stop(self, container):
"""
Stop and delete the container
:param samcli.local.docker.container.Container container: Container to stop
"""
# container.delete()
def pull_image(self, image_name, stream=None):
"""
Ask Docker to pull the container image with given name.
Parameters
----------
image_name str
Name of the image
stream samcli.lib.utils.stream_writer.StreamWriter
Optional stream writer to output to. Defaults to stderr
Raises
------
DockerImagePullFailedException
If the Docker image was not available in the server
"""
stream_writer = stream or StreamWriter(sys.stderr)
try:
result_itr = self.docker_client.api.pull(image_name, stream=True, decode=True)
except docker.errors.APIError as ex:
LOG.debug("Failed to download image with name %s", image_name)
raise DockerImagePullFailedException(str(ex))
# io streams, especially StringIO, work only with unicode strings
stream_writer.write("\nFetching {} Docker container image...".format(image_name))
# Each line contains information on progress of the pull. Each line is a JSON string
for _ in result_itr:
# For every line, print a dot to show progress
stream_writer.write(".")
stream_writer.flush()
# We are done. Go to the next line
stream_writer.write("\n")
def has_image(self, image_name):
"""
Is the container image with given name available?
:param string image_name: Name of the image
:return bool: True, if image is available. False, otherwise
"""
try:
self.docker_client.images.get(image_name)
return True
except docker.errors.ImageNotFound:
return False
class DockerImagePullFailedException(Exception):
pass
| 35.805031
| 120
| 0.642895
|
a2ba04130292b1b20368abab7c5b4c8ea507bc6c
| 1,317
|
py
|
Python
|
tests/test_prompt.py
|
staticdev/irpf-investidor
|
d3f7cba1b6cc719d77b7d9f36202f6deb704152d
|
[
"MIT"
] | 8
|
2022-02-14T19:03:59.000Z
|
2022-03-30T03:52:20.000Z
|
tests/test_prompt.py
|
staticdev/irpf-investidor
|
d3f7cba1b6cc719d77b7d9f36202f6deb704152d
|
[
"MIT"
] | 12
|
2022-01-02T09:49:40.000Z
|
2022-03-31T12:04:17.000Z
|
tests/test_prompt.py
|
staticdev/irpf-investidor
|
d3f7cba1b6cc719d77b7d9f36202f6deb704152d
|
[
"MIT"
] | 2
|
2022-02-07T13:53:56.000Z
|
2022-03-08T02:18:40.000Z
|
"""Test cases for prompt module."""
import pytest
from pytest_mock import MockerFixture
import irpf_investidor.prompt as prompt
TRADES = [(0, "trade 1"), (0, "trade 2")]
@pytest.fixture
def mock_checkboxlist_dialog(mocker: MockerFixture) -> MockerFixture:
"""Fixture for mocking shortcuts.checkboxlist_dialog."""
return mocker.patch("prompt_toolkit.shortcuts.checkboxlist_dialog")
@pytest.fixture
def mock_yes_no_dialog(mocker: MockerFixture) -> MockerFixture:
"""Fixture for mocking shortcuts.yes_no_dialog."""
return mocker.patch("prompt_toolkit.shortcuts.yes_no_dialog")
def test_select_trades_empty(
mock_checkboxlist_dialog: MockerFixture, mock_yes_no_dialog: MockerFixture
) -> None:
"""It returns empty list."""
mock_checkboxlist_dialog.return_value.run.side_effect = [[], None]
mock_yes_no_dialog.return_value.run.side_effect = [False, True]
result = prompt.select_trades(TRADES)
assert mock_checkboxlist_dialog.call_count == 2
assert mock_yes_no_dialog.call_count == 2
assert result == []
def test_select_trades_some_selected(mock_checkboxlist_dialog: MockerFixture) -> None:
"""It returns list with id 1."""
mock_checkboxlist_dialog.return_value.run.return_value = [1]
result = prompt.select_trades(TRADES)
assert result == [1]
| 29.931818
| 86
| 0.754746
|
bab8fa380db0e59b4551c309861020fcc006f06e
| 186
|
py
|
Python
|
corehq/sql_accessors/migrations/0022_last_modified_form_id.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 471
|
2015-01-10T02:55:01.000Z
|
2022-03-29T18:07:18.000Z
|
corehq/sql_accessors/migrations/0022_last_modified_form_id.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 14,354
|
2015-01-01T07:38:23.000Z
|
2022-03-31T20:55:14.000Z
|
corehq/sql_accessors/migrations/0022_last_modified_form_id.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 175
|
2015-01-06T07:16:47.000Z
|
2022-03-29T13:27:01.000Z
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('sql_accessors', '0021_get_ledger_values_for_cases'),
]
operations = []
| 16.909091
| 62
| 0.688172
|
a21588ea414b3b1a60186de4686537783a609b46
| 30,990
|
py
|
Python
|
seleniumbase/fixtures/js_utils.py
|
kendrickassignemt/Selenium-Testing
|
b8833b1e9a1235e86d1383f64d1da18f811b8e4d
|
[
"MIT"
] | null | null | null |
seleniumbase/fixtures/js_utils.py
|
kendrickassignemt/Selenium-Testing
|
b8833b1e9a1235e86d1383f64d1da18f811b8e4d
|
[
"MIT"
] | null | null | null |
seleniumbase/fixtures/js_utils.py
|
kendrickassignemt/Selenium-Testing
|
b8833b1e9a1235e86d1383f64d1da18f811b8e4d
|
[
"MIT"
] | 1
|
2021-11-06T06:39:18.000Z
|
2021-11-06T06:39:18.000Z
|
"""
This module contains useful Javascript utility methods for base_case.py
These helper methods SHOULD NOT be called directly from tests.
"""
import re
import requests
import time
from selenium.common.exceptions import WebDriverException
from seleniumbase import config as sb_config
from seleniumbase.common import decorators
from seleniumbase.config import settings
from seleniumbase.fixtures import constants
from seleniumbase.fixtures import shared_utils
def wait_for_ready_state_complete(driver, timeout=settings.EXTREME_TIMEOUT):
"""
The DOM (Document Object Model) has a property called "readyState".
When the value of this becomes "complete", page resources are considered
fully loaded (although AJAX and other loads might still be happening).
This method will wait until document.readyState == "complete".
"""
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
shared_utils.check_if_time_limit_exceeded()
try:
# If there's an alert, skip
driver.switch_to.alert
return
except Exception:
# If there's no alert, continue
pass
try:
ready_state = driver.execute_script("return document.readyState")
except WebDriverException:
# Bug fix for: [Permission denied to access property "document"]
time.sleep(0.03)
return True
if ready_state == u'complete':
time.sleep(0.01) # Better be sure everything is done loading
return True
else:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
raise Exception(
"Page elements never fully loaded after %s seconds!" % timeout)
def execute_async_script(driver, script, timeout=settings.EXTREME_TIMEOUT):
driver.set_script_timeout(timeout)
return driver.execute_async_script(script)
def wait_for_angularjs(driver, timeout=settings.LARGE_TIMEOUT, **kwargs):
try:
# If there's an alert, skip
driver.switch_to.alert
return
except Exception:
# If there's no alert, continue
pass
if not settings.WAIT_FOR_ANGULARJS:
return
NG_WRAPPER = '%(prefix)s' \
'var $elm=document.querySelector(' \
'\'[data-ng-app],[ng-app],.ng-scope\')||document;' \
'if(window.angular && angular.getTestability){' \
'angular.getTestability($elm).whenStable(%(handler)s)' \
'}else{' \
'var $inj;try{$inj=angular.element($elm).injector()||' \
'angular.injector([\'ng\'])}catch(ex){' \
'$inj=angular.injector([\'ng\'])};$inj.get=$inj.get||' \
'$inj;$inj.get(\'$browser\').' \
'notifyWhenNoOutstandingRequests(%(handler)s)}' \
'%(suffix)s'
def_pre = 'var cb=arguments[arguments.length-1];if(window.angular){'
prefix = kwargs.pop('prefix', def_pre)
handler = kwargs.pop('handler', 'function(){cb(true)}')
suffix = kwargs.pop('suffix', '}else{cb(false)}')
script = NG_WRAPPER % {'prefix': prefix,
'handler': handler,
'suffix': suffix}
try:
execute_async_script(driver, script, timeout=timeout)
except Exception:
time.sleep(0.05)
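# A minimal, hypothetical usage sketch of the readiness helpers above; it
# assumes "driver" is a live selenium WebDriver and "url" is reachable.
def _demo_wait_for_page(driver, url):
    # Navigate, then block until document.readyState == "complete" and any
    # AngularJS testability hooks report that outstanding requests finished.
    driver.get(url)
    wait_for_ready_state_complete(driver)
    wait_for_angularjs(driver)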
def is_html_inspector_activated(driver):
try:
driver.execute_script("HTMLInspector") # Fails if not defined
return True
except Exception:
return False
def is_jquery_activated(driver):
try:
driver.execute_script("jQuery('html')") # Fails if jq is not defined
return True
except Exception:
return False
def wait_for_jquery_active(driver, timeout=None):
if not timeout:
timeout = int(settings.MINI_TIMEOUT * 10.0)
else:
timeout = int(timeout * 10.0)
for x in range(timeout):
# jQuery needs a small amount of time to activate.
try:
driver.execute_script("jQuery('html')")
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
def raise_unable_to_load_jquery_exception(driver):
""" The most-likely reason for jQuery not loading on web pages. """
raise Exception(
'''Unable to load jQuery on "%s" due to a possible violation '''
'''of the website's Content Security Policy directive. '''
'''To override this policy, add "--disable-csp" on the '''
'''command-line when running your tests.''' % driver.current_url)
def activate_jquery(driver):
""" If "jQuery is not defined", use this method to activate it for use.
This happens because jQuery is not always defined on web sites. """
try:
# Let's first find out if jQuery is already defined.
driver.execute_script("jQuery('html')")
# Since that command worked, jQuery is defined. Let's return.
return
except Exception:
# jQuery is not currently defined. Let's proceed by defining it.
pass
jquery_js = constants.JQuery.MIN_JS
activate_jquery_script = (
'''var script = document.createElement('script');'''
'''script.src = "%s";document.getElementsByTagName('head')[0]'''
'''.appendChild(script);''' % jquery_js)
driver.execute_script(activate_jquery_script)
for x in range(int(settings.MINI_TIMEOUT * 10.0)):
# jQuery needs a small amount of time to activate.
try:
driver.execute_script("jQuery('html')")
return
except Exception:
time.sleep(0.1)
# Since jQuery still isn't activating, give up and raise an exception
raise_unable_to_load_jquery_exception(driver)
def are_quotes_escaped(string):
if (string.count("\\'") != string.count("'") or (
string.count('\\"') != string.count('"'))):
return True
return False
def escape_quotes_if_needed(string):
"""
re.escape() works differently in Python 3.7.0 than earlier versions:
Python 3.6.5:
>>> import re
>>> re.escape('"')
'\\"'
Python 3.7.0:
>>> import re
>>> re.escape('"')
'"'
SeleniumBase needs quotes to be properly escaped for Javascript calls.
"""
if are_quotes_escaped(string):
if string.count("'") != string.count("\\'"):
string = string.replace("'", "\\'")
if string.count('"') != string.count('\\"'):
string = string.replace('"', '\\"')
return string
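# A small, hypothetical illustration of how the escaping helpers above are
# meant to be used before a selector is embedded inside a single-quoted
# JavaScript string; the selector value here is only an example.
def _demo_escape_selector():
    raw = 'div[tab="advanced"]'
    escaped = escape_quotes_if_needed(re.escape(raw))
    # The escaped selector can now be interpolated safely into a JS snippet.
    return """return document.querySelector('%s')""" % escaped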
def safe_execute_script(driver, script):
""" When executing a script that contains a jQuery command,
it's important that the jQuery library has been loaded first.
This method will load jQuery if it wasn't already loaded. """
try:
driver.execute_script(script)
except Exception:
# The likely reason this fails is because: "jQuery is not defined"
activate_jquery(driver) # It's a good thing we can define it here
driver.execute_script(script)
def wait_for_css_query_selector(
driver, selector, timeout=settings.SMALL_TIMEOUT):
element = None
start_ms = time.time() * 1000.0
stop_ms = start_ms + (timeout * 1000.0)
for x in range(int(timeout * 10)):
try:
selector = re.escape(selector)
selector = escape_quotes_if_needed(selector)
element = driver.execute_script(
"""return document.querySelector('%s')""" % selector)
if element:
return element
except Exception:
element = None
if not element:
now_ms = time.time() * 1000.0
if now_ms >= stop_ms:
break
time.sleep(0.1)
raise Exception(
"Element {%s} was not present after %s seconds!" % (
selector, timeout))
def highlight_with_js(driver, selector, loops, o_bs):
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)';"""
% selector)
try:
driver.execute_script(script)
except Exception:
return
for n in range(loops):
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(255, 0, 0, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 0, 128, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(0, 0, 255, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(0, 255, 0, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 128, 0, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 0, 128, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'%s';"""
% (selector, o_bs))
driver.execute_script(script)
def highlight_with_jquery(driver, selector, loops, o_bs):
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)');""" % selector
safe_execute_script(driver, script)
for n in range(loops):
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(255, 0, 0, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 0, 128, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(0, 0, 255, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(0, 255, 0, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 128, 0, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 0, 128, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow', '%s');""" % (selector, o_bs)
driver.execute_script(script)
def add_css_link(driver, css_link):
script_to_add_css = (
"""function injectCSS(css) {
var head = document.getElementsByTagName("head")[0];
var link = document.createElement("link");
link.rel = "stylesheet";
link.type = "text/css";
link.href = css;
link.crossorigin = "anonymous";
head.appendChild(link);
}
injectCSS("%s");""")
css_link = escape_quotes_if_needed(css_link)
driver.execute_script(script_to_add_css % css_link)
def add_js_link(driver, js_link):
script_to_add_js = (
"""function injectJS(link) {
var body = document.getElementsByTagName("body")[0];
var script = document.createElement("script");
script.src = link;
script.defer;
script.type="text/javascript";
script.crossorigin = "anonymous";
script.onload = function() { null };
body.appendChild(script);
}
injectJS("%s");""")
js_link = escape_quotes_if_needed(js_link)
driver.execute_script(script_to_add_js % js_link)
def add_css_style(driver, css_style):
add_css_style_script = (
"""function injectStyle(css) {
var head = document.getElementsByTagName("head")[0];
var style = document.createElement("style");
style.type = "text/css";
style.appendChild(document.createTextNode(css));
head.appendChild(style);
}
injectStyle("%s");""")
css_style = css_style.replace('\n', '')
css_style = escape_quotes_if_needed(css_style)
driver.execute_script(add_css_style_script % css_style)
def add_js_code_from_link(driver, js_link):
if js_link.startswith("//"):
js_link = "http:" + js_link
js_code = requests.get(js_link).text
add_js_code_script = (
'''var body = document.getElementsByTagName('body').item(0);'''
'''var script = document.createElement("script");'''
'''script.type = "text/javascript";'''
'''script.onload = function() { null };'''
'''script.appendChild(document.createTextNode("%s"));'''
'''body.appendChild(script);''')
js_code = js_code.replace('\n', ' ')
js_code = escape_quotes_if_needed(js_code)
driver.execute_script(add_js_code_script % js_code)
def add_js_code(driver, js_code):
add_js_code_script = (
'''var body = document.getElementsByTagName('body').item(0);'''
'''var script = document.createElement("script");'''
'''script.type = "text/javascript";'''
'''script.onload = function() { null };'''
'''script.appendChild(document.createTextNode("%s"));'''
'''body.appendChild(script);''')
js_code = js_code.replace('\n', ' ')
js_code = escape_quotes_if_needed(js_code)
driver.execute_script(add_js_code_script % js_code)
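# A hedged usage sketch of the injection helpers above; it assumes "driver" is
# a live WebDriver session with a page already loaded, and the CSS/JS payloads
# are placeholders.
def _demo_inject_assets(driver):
    # Inject an inline stylesheet and a tiny inline script into the page.
    add_css_style(driver, "body { background-color: #fafafa; }")
    add_js_code(driver, "window.__sketch_flag = true;")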
def add_meta_tag(driver, http_equiv=None, content=None):
if http_equiv is None:
http_equiv = "Content-Security-Policy"
if content is None:
content = ("default-src *; style-src 'self' 'unsafe-inline'; "
"script-src: 'self' 'unsafe-inline' 'unsafe-eval'")
script_to_add_meta = (
"""function injectMeta() {
var meta = document.createElement('meta');
meta.httpEquiv = "%s";
meta.content = "%s";
document.getElementsByTagName('head')[0].appendChild(meta);
}
injectMeta();""" % (http_equiv, content))
driver.execute_script(script_to_add_meta)
def is_jquery_confirm_activated(driver):
try:
driver.execute_script("jconfirm") # Fails if jq_confirm is not defined
return True
except Exception:
return False
def activate_jquery_confirm(driver):
jquery_js = constants.JQuery.MIN_JS
jq_confirm_css = constants.JqueryConfirm.MIN_CSS
jq_confirm_js = constants.JqueryConfirm.MIN_JS
if not is_jquery_activated(driver):
add_js_link(driver, jquery_js)
wait_for_jquery_active(driver, timeout=0.9)
add_css_link(driver, jq_confirm_css)
add_js_link(driver, jq_confirm_js)
for x in range(15):
# jQuery-Confirm needs a small amount of time to load & activate.
try:
driver.execute_script("jconfirm")
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
def activate_html_inspector(driver):
jquery_js = constants.JQuery.MIN_JS
html_inspector_js = constants.HtmlInspector.MIN_JS
if is_html_inspector_activated(driver):
return
if not is_jquery_activated(driver):
add_js_link(driver, jquery_js)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
wait_for_jquery_active(driver, timeout=1.5)
add_js_link(driver, html_inspector_js)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
for x in range(15):
# HTML-Inspector needs a small amount of time to load & activate.
try:
driver.execute_script("HTMLInspector")
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
def activate_messenger(driver):
jquery_js = constants.JQuery.MIN_JS
messenger_css = constants.Messenger.MIN_CSS
messenger_js = constants.Messenger.MIN_JS
msgr_theme_flat_js = constants.Messenger.THEME_FLAT_JS
msgr_theme_future_js = constants.Messenger.THEME_FUTURE_JS
msgr_theme_flat_css = constants.Messenger.THEME_FLAT_CSS
msgr_theme_future_css = constants.Messenger.THEME_FUTURE_CSS
msgr_theme_block_css = constants.Messenger.THEME_BLOCK_CSS
msgr_theme_air_css = constants.Messenger.THEME_AIR_CSS
msgr_theme_ice_css = constants.Messenger.THEME_ICE_CSS
spinner_css = constants.Messenger.SPINNER_CSS
underscore_js = constants.Underscore.MIN_JS
msg_style = ("Messenger.options = {'maxMessages': 8, "
"extraClasses: 'messenger-fixed "
"messenger-on-bottom messenger-on-right', "
"theme: 'flat'}")
if not is_jquery_activated(driver):
add_js_link(driver, jquery_js)
wait_for_jquery_active(driver, timeout=0.9)
add_css_link(driver, messenger_css)
add_css_link(driver, msgr_theme_flat_css)
add_css_link(driver, msgr_theme_future_css)
add_css_link(driver, msgr_theme_block_css)
add_css_link(driver, msgr_theme_air_css)
add_css_link(driver, msgr_theme_ice_css)
add_js_link(driver, underscore_js)
add_css_link(driver, spinner_css)
add_js_link(driver, messenger_js)
add_js_link(driver, msgr_theme_flat_js)
add_js_link(driver, msgr_theme_future_js)
from seleniumbase.core import style_sheet
add_css_style(driver, style_sheet.messenger_style)
for x in range(int(settings.MINI_TIMEOUT * 10.0)):
# Messenger needs a small amount of time to load & activate.
try:
driver.execute_script(msg_style)
wait_for_ready_state_complete(driver)
wait_for_angularjs(driver)
return
except Exception:
time.sleep(0.1)
def set_messenger_theme(driver, theme="default", location="default",
max_messages="default"):
if theme == "default":
theme = "flat"
if location == "default":
location = "bottom_right"
if sb_config.mobile_emulator:
location = "top_center"
if max_messages == "default":
max_messages = "8"
valid_themes = ['flat', 'future', 'block', 'air', 'ice']
if theme not in valid_themes:
raise Exception("Theme: %s is not in %s!" % (theme, valid_themes))
    valid_locations = (['top_left', 'top_center', 'top_right',
                        'bottom_left', 'bottom_center', 'bottom_right'])
if location not in valid_locations:
raise Exception(
"Location: %s is not in %s!" % (location, valid_locations))
if location == 'top_left':
messenger_location = "messenger-on-top messenger-on-left"
elif location == 'top_center':
messenger_location = "messenger-on-top"
elif location == 'top_right':
messenger_location = "messenger-on-top messenger-on-right"
elif location == 'bottom_left':
messenger_location = "messenger-on-bottom messenger-on-left"
elif location == 'bottom_center':
messenger_location = "messenger-on-bottom"
elif location == 'bottom_right':
messenger_location = "messenger-on-bottom messenger-on-right"
msg_style = ("Messenger.options = {'maxMessages': %s, "
"extraClasses: 'messenger-fixed %s', theme: '%s'}"
% (max_messages, messenger_location, theme))
try:
driver.execute_script(msg_style)
except Exception:
activate_messenger(driver)
driver.execute_script(msg_style)
time.sleep(0.1)
def post_message(driver, message, msg_dur, style="info"):
""" A helper method to post a message on the screen with Messenger.
(Should only be called from post_message() in base_case.py) """
if not msg_dur:
msg_dur = settings.DEFAULT_MESSAGE_DURATION
msg_dur = float(msg_dur)
message = re.escape(message)
message = escape_quotes_if_needed(message)
messenger_script = ('''Messenger().post({message: "%s", type: "%s", '''
'''hideAfter: %s, hideOnNavigate: true});'''
% (message, style, msg_dur))
try:
driver.execute_script(messenger_script)
except Exception:
activate_messenger(driver)
set_messenger_theme(driver)
try:
driver.execute_script(messenger_script)
except Exception:
time.sleep(0.2)
activate_messenger(driver)
time.sleep(0.2)
set_messenger_theme(driver)
time.sleep(0.5)
driver.execute_script(messenger_script)
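# A hedged usage sketch of the Messenger helpers above; it assumes a live
# WebDriver session, and the message text and duration are placeholders.
# Messenger assets are loaded on demand by post_message() if needed.
def _demo_post_message(driver):
    set_messenger_theme(driver, theme="flat", location="bottom_right")
    post_message(driver, "Hello from the sketch", 2.0, style="info")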
def post_messenger_success_message(driver, message, msg_dur):
if not msg_dur:
msg_dur = settings.DEFAULT_MESSAGE_DURATION
msg_dur = float(msg_dur)
try:
theme = "flat"
location = "bottom_right"
if sb_config.mobile_emulator:
location = "top_center"
set_messenger_theme(driver, theme=theme, location=location)
post_message(
driver, message, msg_dur, style="success")
time.sleep(msg_dur + 0.07)
except Exception:
pass
def post_messenger_error_message(driver, message, msg_dur):
if not msg_dur:
msg_dur = settings.DEFAULT_MESSAGE_DURATION
msg_dur = float(msg_dur)
try:
set_messenger_theme(driver, theme="block", location="top_center")
post_message(
driver, message, msg_dur, style="error")
time.sleep(msg_dur + 0.07)
except Exception:
pass
def highlight_with_js_2(driver, message, selector, o_bs, msg_dur):
if selector == "html":
selector = "body"
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)';"""
% selector)
try:
driver.execute_script(script)
except Exception:
return
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(205, 30, 0, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(128, 0, 128, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(50, 50, 128, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
script = ("""document.querySelector('%s').style.boxShadow =
'0px 0px 6px 6px rgba(50, 205, 50, 1)';"""
% selector)
driver.execute_script(script)
time.sleep(0.0181)
post_messenger_success_message(driver, message, msg_dur)
script = ("""document.querySelector('%s').style.boxShadow =
'%s';""" % (selector, o_bs))
driver.execute_script(script)
def highlight_with_jquery_2(driver, message, selector, o_bs, msg_dur):
if selector == "html":
selector = "body"
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 128, 128, 0.5)');""" % selector
try:
safe_execute_script(driver, script)
except Exception:
return
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(205, 30, 0, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(128, 0, 128, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(50, 50, 200, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
script = """jQuery('%s').css('box-shadow',
'0px 0px 6px 6px rgba(50, 205, 50, 1)');""" % selector
driver.execute_script(script)
time.sleep(0.0181)
post_messenger_success_message(driver, message, msg_dur)
script = """jQuery('%s').css('box-shadow', '%s');""" % (selector, o_bs)
driver.execute_script(script)
def get_scroll_distance_to_element(driver, element):
try:
scroll_position = driver.execute_script("return window.scrollY;")
element_location = None
element_location = element.location['y']
element_location = element_location - 130
if element_location < 0:
element_location = 0
distance = element_location - scroll_position
return distance
except Exception:
return 0
def scroll_to_element(driver, element):
element_location = None
try:
element_location = element.location['y']
except Exception:
# element.location_once_scrolled_into_view # Old hack
return False
element_location = element_location - 130
if element_location < 0:
element_location = 0
scroll_script = "window.scrollTo(0, %s);" % element_location
# The old jQuery scroll_script required by=By.CSS_SELECTOR
# scroll_script = "jQuery('%s')[0].scrollIntoView()" % selector
try:
driver.execute_script(scroll_script)
return True
except Exception:
return False
def slow_scroll_to_element(driver, element, browser):
if browser == 'ie':
# IE breaks on slow-scrolling. Do a fast scroll instead.
scroll_to_element(driver, element)
return
scroll_position = driver.execute_script("return window.scrollY;")
element_location = None
try:
element_location = element.location['y']
except Exception:
element.location_once_scrolled_into_view
return
element_location = element_location - 130
if element_location < 0:
element_location = 0
distance = element_location - scroll_position
if distance != 0:
total_steps = int(abs(distance) / 50.0) + 2.0
step_value = float(distance) / total_steps
new_position = scroll_position
for y in range(int(total_steps)):
time.sleep(0.011)
new_position += step_value
scroll_script = "window.scrollTo(0, %s);" % new_position
driver.execute_script(scroll_script)
time.sleep(0.01)
scroll_script = "window.scrollTo(0, %s);" % element_location
driver.execute_script(scroll_script)
time.sleep(0.01)
if distance > 430 or distance < -300:
# Add small recovery time for long-distance slow-scrolling
time.sleep(0.162)
else:
time.sleep(0.045)
def get_drag_and_drop_script():
script = (r"""(function( $ ) {
$.fn.simulateDragDrop = function(options) {
return this.each(function() {
new $.simulateDragDrop(this, options);
});
};
$.simulateDragDrop = function(elem, options) {
this.options = options;
this.simulateEvent(elem, options);
};
$.extend($.simulateDragDrop.prototype, {
simulateEvent: function(elem, options) {
/*Simulating drag start*/
var type = 'dragstart';
var event = this.createEvent(type);
this.dispatchEvent(elem, type, event);
/*Simulating drop*/
type = 'drop';
var dropEvent = this.createEvent(type, {});
dropEvent.dataTransfer = event.dataTransfer;
this.dispatchEvent(
$(options.dropTarget)[0], type, dropEvent);
/*Simulating drag end*/
type = 'dragend';
var dragEndEvent = this.createEvent(type, {});
dragEndEvent.dataTransfer = event.dataTransfer;
this.dispatchEvent(elem, type, dragEndEvent);
},
createEvent: function(type) {
var event = document.createEvent("CustomEvent");
event.initCustomEvent(type, true, true, null);
event.dataTransfer = {
data: {
},
setData: function(type, val){
this.data[type] = val;
},
getData: function(type){
return this.data[type];
}
};
return event;
},
dispatchEvent: function(elem, type, event) {
if(elem.dispatchEvent) {
elem.dispatchEvent(event);
}else if( elem.fireEvent ) {
elem.fireEvent("on"+type, event);
}
}
});
})(jQuery);""")
return script
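# An illustrative, hypothetical sketch of how the jQuery plugin returned by
# get_drag_and_drop_script() might be invoked; it assumes jQuery can be
# activated on the page and that both arguments are valid CSS selectors for a
# draggable element and its drop target.
def _demo_drag_and_drop(driver, drag_selector, drop_selector):
    activate_jquery(driver)
    driver.execute_script(get_drag_and_drop_script())
    driver.execute_script(
        "jQuery('%s').simulateDragDrop({dropTarget: '%s'});"
        % (drag_selector, drop_selector))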
def clear_out_console_logs(driver):
try:
# Clear out the current page log before navigating to a new page
# (To make sure that assert_no_js_errors() uses current results)
driver.get_log('browser')
except Exception:
pass
@decorators.deprecated("Use re.escape() instead, which does what you want!")
def _jq_format(code):
"""
DEPRECATED - Use re.escape() instead, which performs the intended action.
Use before throwing raw code such as 'div[tab="advanced"]' into jQuery.
Selectors with quotes inside of quotes would otherwise break jQuery.
If you just want to escape quotes, there's escape_quotes_if_needed().
This is similar to "json.dumps(value)", but with one less layer of quotes.
"""
code = code.replace('\\', '\\\\').replace('\t', '\\t').replace('\n', '\\n')
code = code.replace('\"', '\\\"').replace('\'', '\\\'')
code = code.replace('\v', '\\v').replace('\a', '\\a').replace('\f', '\\f')
code = code.replace('\b', '\\b').replace(r'\u', '\\u').replace('\r', '\\r')
return code
| 37.247596
| 79
| 0.600355
|
5a48c018059313f6af4335c5360cf3b9e972a5c4
| 5,774
|
py
|
Python
|
rltk/similarity/tf_idf.py
|
ckxz105/rltk
|
2d08269002c00c0218421c8c2dc0cc7c4f677131
|
[
"MIT"
] | 98
|
2017-03-07T22:59:41.000Z
|
2022-02-02T16:10:40.000Z
|
rltk/similarity/tf_idf.py
|
ckxz105/rltk
|
2d08269002c00c0218421c8c2dc0cc7c4f677131
|
[
"MIT"
] | 26
|
2017-04-25T17:25:22.000Z
|
2021-09-10T16:57:05.000Z
|
rltk/similarity/tf_idf.py
|
ckxz105/rltk
|
2d08269002c00c0218421c8c2dc0cc7c4f677131
|
[
"MIT"
] | 31
|
2017-03-09T22:40:40.000Z
|
2022-03-11T16:28:23.000Z
|
import collections
import math
import rltk.utils as utils
def tf_idf_similarity(bag1, bag2, df_corpus, doc_size, math_log=False):
"""
Computes TF/IDF measure. This measure employs the notion of TF/IDF score commonly used in information retrieval (IR) to find documents that are relevant to keyword queries. The intuition underlying the TF/IDF measure is that two strings are similar if they share distinguishing terms.
Note:
        If you need to call this function many times, :meth:`TF_IDF` is more efficient.
Args:
bag1 (list): Bag 1.
bag2 (list): Bag 2.
df_corpus (dict): The pre calculated document frequency of corpus.
doc_size (int): total documents used in corpus.
math_log (bool, optional): Flag to indicate whether math.log() should be used in TF and IDF formulas. Defaults to False.
Returns:
float: TF/IDF cosine similarity.
Examples:
>>> rltk.tfidf(['a', 'b', 'a'], ['a', 'c'], {'a':3, 'b':1, 'c':1}, 3)
0.17541160386140586
>>> rltk.tfidf(['a', 'b', 'a'], ['a', 'c'], {'a':3, 'b':2, 'c':1}, 4, True)
0.12977804138
>>> rltk.tfidf(['a', 'b', 'a'], ['a'], {'a':3, 'b':1, 'c':1}, 3)
0.5547001962252291
"""
# http://www.tfidf.com/
utils.check_for_none(bag1, bag2, df_corpus)
utils.check_for_type(list, bag1, bag2)
# term frequency for input strings
t_x, t_y = collections.Counter(bag1), collections.Counter(bag2)
tf_x = {k: float(v) / len(bag1) for k, v in t_x.items()}
tf_y = {k: float(v) / len(bag2) for k, v in t_y.items()}
# unique element
total_unique_elements = set()
total_unique_elements.update(bag1)
total_unique_elements.update(bag2)
idf_element, v_x, v_y, v_x_y, v_x_2, v_y_2 = 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
# tfidf calculation
for element in total_unique_elements:
if element not in df_corpus:
continue
idf_element = doc_size * 1.0 / df_corpus[element]
v_x = 0 if element not in tf_x else (math.log(idf_element) * tf_x[element]) if math_log else (
idf_element * tf_x[element])
v_y = 0 if element not in tf_y else (math.log(idf_element) * tf_y[element]) if math_log else (
idf_element * tf_y[element])
v_x_y += v_x * v_y
v_x_2 += v_x * v_x
v_y_2 += v_y * v_y
# cosine similarity
return 0.0 if v_x_y == 0 else v_x_y / (math.sqrt(v_x_2) * math.sqrt(v_y_2))
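# Worked walk-through (added illustration) of the first docstring example,
# showing where 0.17541160386140586 comes from:
#   bag1 = ['a', 'b', 'a'], bag2 = ['a', 'c'], df = {'a': 3, 'b': 1, 'c': 1}, doc_size = 3
#   tf_x = {'a': 2/3, 'b': 1/3}            tf_y = {'a': 1/2, 'c': 1/2}
#   idf  = {'a': 3/3 = 1, 'b': 3/1 = 3, 'c': 3/1 = 3}   (math_log=False)
#   weighted vectors: x = {'a': 2/3, 'b': 1}, y = {'a': 1/2, 'c': 3/2}
#   dot product = (2/3) * (1/2) = 1/3
#   cosine = (1/3) / (sqrt(4/9 + 1) * sqrt(1/4 + 9/4)) ≈ 0.1754116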
def compute_tf(tokens):
"""
Compute TF (Term Frequency)
Args:
tokens (list): tokens
"""
terms = collections.Counter(tokens)
return {k: float(v) / len(tokens) for k, v in terms.items()}
def compute_idf(df_corpus, doc_size, math_log=False):
"""
Compute IDF (Inverted Document Frequency)
Args:
df_corpus (dict): terms in document
doc_size (int): total document size
math_log (bool): logarithm of the result
"""
return {k: doc_size * 1.0 / v if math_log is False \
else math.log(doc_size * 1.0 / v) \
for k, v in df_corpus.items()}
def tf_idf_cosine_similarity(tfidf_dict1, tfidf_dict2):
"""
Compute Cosine similarity for TF/IDF value dictionary
Args:
tfidf_dict1 (dict): TF/IDF dictionary for first record, format in ``{term1: tfidf value, ...}``
tfidf_dict2 (dict): TF/IDF dictionary for second record, same format as tfidf_dict1.
Returns:
float:
"""
v_x_y, v_x_2, v_y_2 = 0.0, 0.0, 0.0
# intersection of dict1 and dict2
# ignore the values that are not in both
for t in tfidf_dict1.keys():
if t in tfidf_dict2:
v_x_y += tfidf_dict1[t] * tfidf_dict2[t]
for t, tfidf in tfidf_dict1.items():
v_x_2 += tfidf * tfidf
for t, tfidf in tfidf_dict2.items():
v_y_2 += tfidf * tfidf
# cosine similarity
return 0.0 if v_x_y == 0 else v_x_y / (math.sqrt(v_x_2) * math.sqrt(v_y_2))
class TF_IDF():
"""
TF/IDF helper class (An efficient implementation)
Examples::
# initialization
tfidf = TF_IDF()
# add document
tfidf.add_document('id1', ['a', 'b', 'a'])
tfidf.add_document('id2', ['b', 'c'])
tfidf.add_document('id3', ['b', 'd'])
# compute idf
tfidf.pre_compute()
# get similarity
tfidf.similarity('id1', 'id2')
tfidf.similarity('id1', 'id3')
"""
def __init__(self):
self.tf = {}
self.df_corpus = {}
self.doc_size = 0
self.idf = 0
def add_document(self, doc_id: str, tokens: list):
"""
Add document to corpus
Args:
doc_id (str): Document (record) id.
tokens (list): List of token string.
"""
self.doc_size += 1
tf = compute_tf(tokens)
self.tf[doc_id] = tf
for k, _ in tf.items():
self.df_corpus[k] = self.df_corpus.get(k, 0) + 1
def pre_compute(self, math_log: bool = False):
"""
Pre-compute IDF score
Args:
math_log (bool, optional): Flag to indicate whether math.log() should be used in TF and IDF formulas. Defaults to False.
"""
self.idf = compute_idf(self.df_corpus, self.doc_size, math_log)
def similarity(self, id1, id2):
"""
Get similarity
Args:
id1 (str): id 1
id2 (str): id2
Returns:
float:
"""
tf_x = self.tf[id1]
tfidf_x = {k: v * self.idf[k] for k, v in tf_x.items()}
tf_y = self.tf[id2]
tfidf_y = {k: v * self.idf[k] for k, v in tf_y.items()}
return tf_idf_cosine_similarity(tfidf_x, tfidf_y)
| 31.380435
| 288
| 0.583478
|
caf0317a36e6b48f36fff4fd562e4aead090c756
| 1,122
|
py
|
Python
|
scripts/switch.py
|
vortydev/projet-avion
|
a447ee7ecc448e712b3a898352e054870160e0e2
|
[
"MIT"
] | null | null | null |
scripts/switch.py
|
vortydev/projet-avion
|
a447ee7ecc448e712b3a898352e054870160e0e2
|
[
"MIT"
] | null | null | null |
scripts/switch.py
|
vortydev/projet-avion
|
a447ee7ecc448e712b3a898352e054870160e0e2
|
[
"MIT"
] | null | null | null |
import time
from time import sleep
import RPi.GPIO as GPIO
# define switch pins
interrupteur = 12
def setup():
GPIO.setmode(GPIO.BCM)
GPIO.setup(interrupteur, GPIO.IN, GPIO.PUD_UP)
GPIO.add_event_detect(interrupteur, GPIO.RISING, callback=interrupteurCallback)
# GPIO.add_event_detect(interrupteur, GPIO.FALLING, callback=interrupteurOffCallback)
def loop():
system = False
zBuffer = time.localtime()
while (True):
        system = interrupteurDetect(system, zBuffer)
online = GPIO.input(interrupteur)
print(online)
sleep(0.1)
def interrupteurDetect(system, zBuffer):
    if GPIO.event_detected(interrupteur):
        zStamp = time.localtime()
        # Compare as epoch seconds; asctime() strings do not sort chronologically.
        if time.mktime(zStamp) > time.mktime(zBuffer):
            system = True
    return system
def interrupteurCallback(channel):
print("CALLBACK: SYSTEM ON")
# def interrupteurOffCallback(channel):
# print("CALLBACK: SYSTEM OFF")
def destroy():
GPIO.cleanup()
# main
if __name__ == "__main__":
setup()
try:
loop()
    except KeyboardInterrupt:  # Catch Ctrl-C, call destroy, then exit
destroy()
| 24.391304
| 89
| 0.683601
|
6dc9cf4df98d589c477473a05689c0ae6a69ed72
| 10,838
|
py
|
Python
|
fastai/widgets/image_cleaner.py
|
Whoaa512/fastai
|
b3491479cbef69f7617b25b9a24c52a0d9ecb02b
|
[
"Apache-2.0"
] | 1
|
2021-06-15T13:50:43.000Z
|
2021-06-15T13:50:43.000Z
|
fastai/widgets/image_cleaner.py
|
Whoaa512/fastai
|
b3491479cbef69f7617b25b9a24c52a0d9ecb02b
|
[
"Apache-2.0"
] | 2
|
2021-05-20T23:02:08.000Z
|
2021-09-28T05:48:00.000Z
|
fastai/widgets/image_cleaner.py
|
Whoaa512/fastai
|
b3491479cbef69f7617b25b9a24c52a0d9ecb02b
|
[
"Apache-2.0"
] | 1
|
2020-11-19T06:46:30.000Z
|
2020-11-19T06:46:30.000Z
|
from ..torch_core import *
from ..basic_train import *
from ..basic_data import *
from ..vision.data import *
from ..vision.transform import *
from ..vision.image import *
from ..callbacks.hooks import *
from ..layers import *
from ipywidgets import widgets, Layout
from IPython.display import clear_output, display
__all__ = ['DatasetFormatter', 'ImageCleaner']
class DatasetFormatter():
"Returns a dataset with the appropriate format and file indices to be displayed."
@classmethod
def from_toplosses(cls, learn, n_imgs=None, **kwargs):
"Gets indices with top losses."
train_ds, train_idxs = cls.get_toplosses_idxs(learn, n_imgs, **kwargs)
return train_ds, train_idxs
@classmethod
def get_toplosses_idxs(cls, learn, n_imgs, **kwargs):
"Sorts `ds_type` dataset by top losses and returns dataset and sorted indices."
dl = learn.data.fix_dl
if not n_imgs: n_imgs = len(dl.dataset)
_,_,top_losses = learn.get_preds(ds_type=DatasetType.Fix, with_loss=True)
idxs = torch.topk(top_losses, n_imgs)[1]
return cls.padded_ds(dl.dataset, **kwargs), idxs
def padded_ds(ll_input, size=(250, 300), resize_method=ResizeMethod.CROP, padding_mode='zeros', **kwargs):
"For a LabelList `ll_input`, resize each image to `size` using `resize_method` and `padding_mode`."
return ll_input.transform(tfms=crop_pad(), size=size, resize_method=resize_method, padding_mode=padding_mode)
@classmethod
def from_similars(cls, learn, layer_ls:list=[0, 7, 2], **kwargs):
"Gets the indices for the most similar images."
train_ds, train_idxs = cls.get_similars_idxs(learn, layer_ls, **kwargs)
return train_ds, train_idxs
@classmethod
def get_similars_idxs(cls, learn, layer_ls, **kwargs):
"Gets the indices for the most similar images in `ds_type` dataset"
hook = hook_output(learn.model[layer_ls[0]][layer_ls[1]][layer_ls[2]])
dl = learn.data.fix_dl
ds_actns = cls.get_actns(learn, hook=hook, dl=dl, **kwargs)
similarities = cls.comb_similarity(ds_actns, ds_actns, **kwargs)
idxs = cls.sort_idxs(similarities)
return cls.padded_ds(dl, **kwargs), idxs
@staticmethod
def get_actns(learn, hook:Hook, dl:DataLoader, pool=AdaptiveConcatPool2d, pool_dim:int=4, **kwargs):
"Gets activations at the layer specified by `hook`, applies `pool` of dim `pool_dim` and concatenates"
print('Getting activations...')
actns = []
learn.model.eval()
with torch.no_grad():
for (xb,yb) in progress_bar(dl):
learn.model(xb)
actns.append((hook.stored).cpu())
if pool:
pool = pool(pool_dim)
return pool(torch.cat(actns)).view(len(dl.x),-1)
else: return torch.cat(actns).view(len(dl.x),-1)
@staticmethod
def comb_similarity(t1: torch.Tensor, t2: torch.Tensor, **kwargs):
# https://github.com/pytorch/pytorch/issues/11202
"Computes the similarity function between each embedding of `t1` and `t2` matrices."
print('Computing similarities...')
w1 = t1.norm(p=2, dim=1, keepdim=True)
w2 = w1 if t2 is t1 else t2.norm(p=2, dim=1, keepdim=True)
return torch.mm(t1, t2.t()) / (w1 * w2.t()).clamp(min=1e-8)
def largest_indices(arr, n):
"Returns the `n` largest indices from a numpy array `arr`."
#https://stackoverflow.com/questions/6910641/how-do-i-get-indices-of-n-maximum-values-in-a-numpy-array
flat = arr.flatten()
indices = np.argpartition(flat, -n)[-n:]
indices = indices[np.argsort(-flat[indices])]
return np.unravel_index(indices, arr.shape)
@classmethod
def sort_idxs(cls, similarities):
"Sorts `similarities` and return the indexes in pairs ordered by highest similarity."
idxs = cls.largest_indices(similarities, len(similarities))
idxs = [(idxs[0][i], idxs[1][i]) for i in range(len(idxs[0]))]
return [e for l in idxs for e in l]
class ImageCleaner():
"Displays images for relabeling or deletion and saves changes in `path` as 'cleaned.csv'."
def __init__(self, dataset, fns_idxs, path, batch_size:int=5, duplicates=False):
self._all_images,self._batch = [],[]
self._path = path
self._batch_size = batch_size
if duplicates: self._batch_size = 2
self._duplicates = duplicates
self._labels = dataset.classes
self._all_images = self.create_image_list(dataset, fns_idxs)
self._csv_dict = {dataset.x.items[i]: dataset.y[i] for i in range(len(dataset))}
self._deleted_fns = []
self._skipped = 0
self.render()
@classmethod
def make_img_widget(cls, img, layout=Layout(), format='jpg'):
"Returns an image widget for specified file name `img`."
return widgets.Image(value=img, format=format, layout=layout)
@classmethod
def make_button_widget(cls, label, file_path=None, handler=None, style=None, layout=Layout(width='auto')):
"Return a Button widget with specified `handler`."
btn = widgets.Button(description=label, layout=layout)
if handler is not None: btn.on_click(handler)
if style is not None: btn.button_style = style
btn.file_path = file_path
btn.flagged_for_delete = False
return btn
@classmethod
def make_dropdown_widget(cls, description='Description', options=['Label 1', 'Label 2'], value='Label 1',
file_path=None, layout=Layout(), handler=None):
"Return a Dropdown widget with specified `handler`."
dd = widgets.Dropdown(description=description, options=options, value=value, layout=layout)
if file_path is not None: dd.file_path = file_path
if handler is not None: dd.observe(handler, names=['value'])
return dd
@classmethod
def make_horizontal_box(cls, children, layout=Layout()):
"Make a horizontal box with `children` and `layout`."
return widgets.HBox(children, layout=layout)
@classmethod
def make_vertical_box(cls, children, layout=Layout(), duplicates=False):
"Make a vertical box with `children` and `layout`."
if not duplicates: return widgets.VBox(children, layout=layout)
else: return widgets.VBox([children[0], children[2]], layout=layout)
def create_image_list(self, dataset, fns_idxs):
"Create a list of images, filenames and labels but first removing files that are not supposed to be displayed."
items = dataset.x.items
if self._duplicates:
chunked_idxs = chunks(fns_idxs, 2)
chunked_idxs = [chunk for chunk in chunked_idxs if Path(items[chunk[0]]).is_file() and Path(items[chunk[1]]).is_file()]
return [(dataset.x[i]._repr_jpeg_(), items[i], self._labels[dataset.y[i].data]) for chunk in chunked_idxs for i in chunk]
else:
return [(dataset.x[i]._repr_jpeg_(), items[i], self._labels[dataset.y[i].data]) for i in fns_idxs if
Path(items[i]).is_file()]
def relabel(self, change):
"Relabel images by moving from parent dir with old label `class_old` to parent dir with new label `class_new`."
class_new,class_old,file_path = change.new,change.old,change.owner.file_path
fp = Path(file_path)
parent = fp.parents[1]
self._csv_dict[fp] = class_new
def next_batch(self, _):
"Handler for 'Next Batch' button click. Delete all flagged images and renders next batch."
        for img_widget, delete_btn, fp in self._batch:
fp = delete_btn.file_path
if (delete_btn.flagged_for_delete == True):
self.delete_image(fp)
self._deleted_fns.append(fp)
self._all_images = self._all_images[self._batch_size:]
self.empty_batch()
self.render()
def on_delete(self, btn):
"Flag this image as delete or keep."
btn.button_style = "" if btn.flagged_for_delete else "danger"
btn.flagged_for_delete = not btn.flagged_for_delete
def empty_batch(self): self._batch[:] = []
def delete_image(self, file_path):
del self._csv_dict[file_path]
def empty(self):
return len(self._all_images) == 0
def get_widgets(self, duplicates):
"Create and format widget set."
widgets = []
for (img,fp,human_readable_label) in self._all_images[:self._batch_size]:
img_widget = self.make_img_widget(img, layout=Layout(height='250px', width='300px'))
dropdown = self.make_dropdown_widget(description='', options=self._labels, value=human_readable_label,
file_path=fp, handler=self.relabel, layout=Layout(width='auto'))
delete_btn = self.make_button_widget('Delete', file_path=fp, handler=self.on_delete)
widgets.append(self.make_vertical_box([img_widget, dropdown, delete_btn],
layout=Layout(width='auto', height='300px',
overflow_x="hidden"), duplicates=duplicates))
self._batch.append((img_widget, delete_btn, fp))
return widgets
def batch_contains_deleted(self):
"Check if current batch contains already deleted images."
if not self._duplicates: return False
imgs = [self._all_images[:self._batch_size][0][1], self._all_images[:self._batch_size][1][1]]
return any(img in self._deleted_fns for img in imgs)
def write_csv(self):
# Get first element's file path so we write CSV to same directory as our data
csv_path = self._path/'cleaned.csv'
with open(csv_path, 'w') as f:
csv_writer = csv.writer(f)
csv_writer.writerow(['name','label'])
for pair in self._csv_dict.items():
pair = [os.path.relpath(pair[0], self._path), pair[1]]
csv_writer.writerow(pair)
return csv_path
def render(self):
"Re-render Jupyter cell for batch of images."
clear_output()
self.write_csv()
if self.empty() and self._skipped>0:
return display(f'No images to show :). {self._skipped} pairs were '
f'skipped since at least one of the images was deleted by the user.')
elif self.empty():
return display('No images to show :)')
if self.batch_contains_deleted():
self.next_batch(None)
self._skipped += 1
else:
display(self.make_horizontal_box(self.get_widgets(self._duplicates)))
display(self.make_button_widget('Next Batch', handler=self.next_batch, style="primary"))
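# A hedged usage sketch combining the two widgets above; it assumes "learn" is
# a trained fastai Learner whose data directory is available at learn.data.path,
# and the n_imgs value is only an example.
def _demo_clean_top_losses(learn, n_imgs=100):
    # Collect the images the model is most unsure about, then open the
    # relabel/delete widget; decisions are written to '<path>/cleaned.csv'.
    ds, idxs = DatasetFormatter.from_toplosses(learn, n_imgs=n_imgs)
    return ImageCleaner(ds, idxs, learn.data.path)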
| 46.316239
| 134
| 0.645599
|
af097aed99b125692c2a8f2f09e7a9c7f58df30c
| 51
|
py
|
Python
|
spotdl/helpers/__init__.py
|
khjxiaogu/spotify-downloader
|
a8dcb8d998da0769bbe210f2808d16b346453c23
|
[
"MIT"
] | 4,698
|
2017-06-20T22:37:10.000Z
|
2022-03-28T13:38:07.000Z
|
spotdl/helpers/__init__.py
|
Delgan/spotify-downloader
|
8adf3e8d6b98269b1538dd91c9a44ed345c77545
|
[
"MIT"
] | 690
|
2017-06-20T20:08:42.000Z
|
2022-02-26T23:36:07.000Z
|
spotdl/helpers/__init__.py
|
Delgan/spotify-downloader
|
8adf3e8d6b98269b1538dd91c9a44ed345c77545
|
[
"MIT"
] | 741
|
2017-06-21T23:32:51.000Z
|
2022-03-07T12:11:54.000Z
|
from spotdl.helpers.spotify import SpotifyHelpers
| 17
| 49
| 0.862745
|
fb47a23c6b7ee56f435278ebab07dbf9f146508a
| 3,004
|
py
|
Python
|
experiments/where_image/architecture/activations.py
|
mtanti/where-image
|
3e232f2eb29c12e0d8ec322cdff656d68b753d19
|
[
"MIT"
] | 3
|
2017-04-05T12:20:49.000Z
|
2020-12-06T07:11:14.000Z
|
experiments/where_image/architecture/activations.py
|
mtanti/where-image
|
3e232f2eb29c12e0d8ec322cdff656d68b753d19
|
[
"MIT"
] | null | null | null |
experiments/where_image/architecture/activations.py
|
mtanti/where-image
|
3e232f2eb29c12e0d8ec322cdff656d68b753d19
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import ascii, bytes, chr, dict, filter, hex, input, int, map, next, oct, open, pow, range, round, str, super, zip
import theano
import theano.tensor as T
import math
import numpy as np
from architecture.layer import *
floatX = theano.config.floatX
##################################################################################################################################
class ScaledTanh(Layer):
#################################################################
def __init__(self, name, in_layer):
super(ScaledTanh, self).__init__(
name,
children=[in_layer],
dependents=[in_layer.name]
)
#################################################################
def compile_params(self, dependent_sizes):
[ in_size ] = dependent_sizes
return in_size
#################################################################
def _get_model(self, dependent_models):
[ in_model ] = dependent_models
return 1.7159*T.tanh(in_model*2/3)
#################################################################
def get_training_model(self, dependent_models):
return self._get_model(dependent_models)
#################################################################
def get_testing_model(self, dependent_models):
return self._get_model(dependent_models)
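# Note (added context): the constants in ScaledTanh appear to follow the
# f(x) = 1.7159 * tanh(2x/3) activation recommended in LeCun et al.,
# "Efficient BackProp"; the scaling is chosen so that f(1) ≈ 1, keeping
# activations away from the saturating tails for normalised inputs.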
##################################################################################################################################
class Softmax(Layer):
#################################################################
def __init__(self, name, in_layer):
super(Softmax, self).__init__(
name,
children=[in_layer],
dependents=[in_layer.name]
)
#################################################################
def compile_params(self, dependent_sizes):
[ in_size ] = dependent_sizes
return in_size
#################################################################
def _get_model(self, dependent_models):
[ in_model ] = dependent_models
return T.nnet.softmax(in_model)
#################################################################
def get_training_model(self, dependent_models):
return self._get_model(dependent_models)
#################################################################
def get_testing_model(self, dependent_models):
return self._get_model(dependent_models)
| 40.594595
| 131
| 0.370173
|
efcd531be54b9e8098c78ce81d90713777634f86
| 1,492
|
py
|
Python
|
app/user.py
|
StidZhang/gomaku
|
af41c255fe9b4fac404acb06d5395db19c9cfb22
|
[
"MIT"
] | 1
|
2022-02-23T00:53:16.000Z
|
2022-02-23T00:53:16.000Z
|
app/user.py
|
StidZhang/gomaku
|
af41c255fe9b4fac404acb06d5395db19c9cfb22
|
[
"MIT"
] | 8
|
2019-12-09T05:37:07.000Z
|
2019-12-09T05:37:09.000Z
|
app/user.py
|
StidZhang/gomaku
|
af41c255fe9b4fac404acb06d5395db19c9cfb22
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from .db import get_db
from pymongo.collation import Collation, CollationStrength
from bson import ObjectId
def get_user_collection():
db = get_db()
return db.user
def get_user_by_name(name):
user = get_user_collection()
return user.find_one({
"username": name
}, collation=Collation(locale='en', strength=CollationStrength.SECONDARY))
def get_user_by_id(id):
user = get_user_collection()
oid = ObjectId(id)
return user.find_one({"_id": oid})
def get_users_by_ids(ids):
s = set(ids)
user = get_user_collection()
q = user.find({
'_id': {'$in': [t for t in s]},
}, {
'_id': True,
'username': True,
})
return {str(x['_id']): x['username'] for x in q }
def create_user(username, password):
u = {
"username": username,
"password": password,
"created": datetime.utcnow()
}
user = get_user_collection()
user.insert_one(u)
return u
def change_user_password(userid, password):
user = get_user_collection()
return user.find_one_and_update({
"_id": userid
}, {
"$set": {"password": password}
})
class User(object):
def __init__(self, user):
self.is_authenticated = True
self.is_active = True
self.is_anonymous = False
self.user = user
self.username = user['username']
self.oid = user['_id']
def get_id(self):
return str(self.oid)
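# An illustrative, hypothetical sketch of how the helpers above fit together;
# it assumes get_db() is configured inside an application context and that the
# password has already been hashed by the caller.
def _demo_register_and_load(username, hashed_password):
    # Create the account only if the (case-insensitive) username is free,
    # then wrap the stored document in the login-manager friendly User class.
    if get_user_by_name(username) is None:
        create_user(username, hashed_password)
    return User(get_user_by_name(username))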
| 21.941176
| 78
| 0.615282
|
8b63db6983a6a6844e9cf0247086f23a441eedb3
| 2,216
|
py
|
Python
|
analysis/experiment3/helpers/stats.py
|
hawkrobe/fish
|
2000e46c397f7c95bba8ecb0c6afd26013929ff8
|
[
"MIT"
] | 1
|
2015-12-11T16:51:08.000Z
|
2015-12-11T16:51:08.000Z
|
analysis/experiment3/helpers/stats.py
|
hawkrobe/fish
|
2000e46c397f7c95bba8ecb0c6afd26013929ff8
|
[
"MIT"
] | 3
|
2020-02-11T21:36:11.000Z
|
2020-11-01T21:25:17.000Z
|
analysis/experiment3/helpers/stats.py
|
hawkrobe/couzin_replication
|
ff491639954f0652d6b4b2a318477bb54c38fadf
|
[
"MIT"
] | null | null | null |
import numpy as np
def get_normal_fit(points):
"""
>>> mu = np.array([10,100])
>>> cov = np.array([[10,-5],[-5,20]])
>>> samples = np.random.multivariate_normal(mu, cov, size = 1000000)
>>> m,s = get_normal_fit(samples)
>>> assert np.linalg.norm(mu - m) < 1
>>> assert np.linalg.norm(cov - s) < 1
"""
return np.mean(points, 0), get_sample_cov(points)
def get_sample_cov(points):
"""
>>> get_sample_cov([[1,2],[3,4],[5,6],[7,8]]).shape
(2, 2)
"""
return np.cov(np.transpose(points))
def bounding_oval(points):
"""
>>> abs(bounding_oval(np.random.normal(size=[10000,2])) - 1) < 0.1
True
>>> abs(bounding_oval(2*np.random.normal(size=[10000,2])) - 4) < 0.1
True
"""
cov = get_sample_cov(points)
vals = np.linalg.eig(cov)[0]
return np.prod(np.sqrt(vals))
def ave_dist(points):
"""
>>> ave_dist(np.array([[0,1],[0,0]]))
1.0
>>> ave_dist(np.array([[0,3],[0,0],[4,0]]))
4.0
"""
dists = []
for i in range(len(points)-1):
for j in range(i+1,len(points)):
dists += [np.linalg.norm(points[i] - points[j])]
return np.mean(dists)
def random_circle(radius, n):
"""
>>> radius = 237
>>> samples = random_circle(radius, 100000)
>>> (np.sqrt(np.sum(samples**2,1)) <= radius).all()
True
>>> # make sure distribution within a random square inside the circle is uniform
>>> size = 10
>>> length = radius * np.cos(np.pi/4) - size/2
>>> x = 2 * np.random.random() * length - length
>>> y = 2 * np.random.random() * length - length
>>> assert abs(x) < length and abs(y) < length
>>> inds = (samples[:,0] > x - size/2) * (samples[:,0] < x + size/2)
>>> inds = inds * (samples[:,1] > y - size/2) * (samples[:,1] < y + size/2)
>>> assert abs(x - np.mean(samples[inds,0])) < 1
>>> assert abs(y - np.mean(samples[inds,1])) < 1
"""
theta = 2 * np.pi * np.random.random(size = n)
rad = np.sqrt(np.random.random(size = n))
x = radius * rad * np.cos(theta)
y = radius * rad * np.sin(theta)
return np.column_stack([x, y])
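# Added explanation: the sqrt applied to the sampled radius is what makes the
# points uniform over the disc's area. If rad were drawn uniformly on [0, 1],
# samples would pile up near the centre, because a thin ring at radius r holds
# area proportional to r. Drawing u ~ U[0, 1] and taking rad = sqrt(u) gives
# P(R <= r) = (r / radius)**2, which matches the fraction of the disc's area
# inside radius r.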
if __name__ == "__main__":
import doctest
doctest.testmod()
| 29.157895
| 84
| 0.54648
|
cadd9ae0f1d38b619c997a54b7eecfead7141775
| 953
|
py
|
Python
|
isi_sdk_9_0_0/test/test_snapshot_locks_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_9_0_0/test/test_snapshot_locks_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_9_0_0/test/test_snapshot_locks_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 10
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_9_0_0
from isi_sdk_9_0_0.models.snapshot_locks_extended import SnapshotLocksExtended # noqa: E501
from isi_sdk_9_0_0.rest import ApiException
class TestSnapshotLocksExtended(unittest.TestCase):
"""SnapshotLocksExtended unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testSnapshotLocksExtended(self):
"""Test SnapshotLocksExtended"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_9_0_0.models.snapshot_locks_extended.SnapshotLocksExtended() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 23.243902
| 100
| 0.720881
|
268a944804a0d2528434261f8a31d02eae35b460
| 12,899
|
py
|
Python
|
multiagent/util.py
|
dcalacci/ml-expectimax-agent
|
1b74a1a02f98b702bf2d21e09f616b590bff481c
|
[
"MIT"
] | 2
|
2018-05-02T07:51:25.000Z
|
2020-05-11T00:55:08.000Z
|
multiagent/util.py
|
dcalacci/ml-expectimax-agent
|
1b74a1a02f98b702bf2d21e09f616b590bff481c
|
[
"MIT"
] | null | null | null |
multiagent/util.py
|
dcalacci/ml-expectimax-agent
|
1b74a1a02f98b702bf2d21e09f616b590bff481c
|
[
"MIT"
] | 2
|
2017-10-10T22:11:51.000Z
|
2019-12-11T16:05:12.000Z
|
import sys
import inspect
import heapq, random
"""
Data structures useful for implementing SearchAgents
"""
class Stack:
"A container with a last-in-first-out (LIFO) queuing policy."
def __init__(self):
self.list = []
def push(self,item):
"Push 'item' onto the stack"
self.list.append(item)
def pop(self):
"Pop the most recently pushed item from the stack"
return self.list.pop()
def isEmpty(self):
"Returns true if the stack is empty"
return len(self.list) == 0
class Queue:
"A container with a first-in-first-out (FIFO) queuing policy."
def __init__(self):
self.list = []
def push(self,item):
"Enqueue the 'item' into the queue"
self.list.insert(0,item)
def pop(self):
"""
Dequeue the earliest enqueued item still in the queue. This
operation removes the item from the queue.
"""
return self.list.pop()
def isEmpty(self):
"Returns true if the queue is empty"
return len(self.list) == 0
class PriorityQueue:
"""
Implements a priority queue data structure. Each inserted item
has a priority associated with it and the client is usually interested
in quick retrieval of the lowest-priority item in the queue. This
data structure allows O(1) access to the lowest-priority item.
Note that this PriorityQueue does not allow you to change the priority
of an item. However, you may insert the same item multiple times with
different priorities.
"""
def __init__(self):
self.heap = []
def push(self, item, priority):
pair = (priority,item)
heapq.heappush(self.heap,pair)
def pop(self):
(priority,item) = heapq.heappop(self.heap)
return item
def isEmpty(self):
return len(self.heap) == 0
class PriorityQueueWithFunction(PriorityQueue):
"""
Implements a priority queue with the same push/pop signature of the
Queue and the Stack classes. This is designed for drop-in replacement for
those two classes. The caller has to provide a priority function, which
extracts each item's priority.
"""
def __init__(self, priorityFunction):
"priorityFunction (item) -> priority"
self.priorityFunction = priorityFunction # store the priority function
PriorityQueue.__init__(self) # super-class initializer
def push(self, item):
"Adds an item to the queue with priority from the priority function"
PriorityQueue.push(self, item, self.priorityFunction(item))
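# A small, hypothetical usage sketch showing how PriorityQueueWithFunction
# turns the plain push/pop interface into a priority queue by supplying a key
# function; the string items here are placeholders.
def _demoPriorityQueueWithFunction():
    frontier = PriorityQueueWithFunction(lambda item: len(item))
    frontier.push("abcd")
    frontier.push("ab")
    return frontier.pop()  # "ab" comes out first: it has the lowest priority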
def manhattanDistance( xy1, xy2 ):
"Returns the Manhattan distance between points xy1 and xy2"
return abs( xy1[0] - xy2[0] ) + abs( xy1[1] - xy2[1] )
"""
Data structures and functions useful for various course projects
The search project should not need anything below this line.
"""
class Counter(dict):
"""
A counter keeps track of counts for a set of keys.
The counter class is an extension of the standard python
dictionary type. It is specialized to have number values
(integers or floats), and includes a handful of additional
functions to ease the task of counting data. In particular,
all keys are defaulted to have value 0. Using a dictionary:
a = {}
print a['test']
would give an error, while the Counter class analogue:
>>> a = Counter()
>>> print a['test']
0
returns the default 0 value. Note that to reference a key
that you know is contained in the counter,
you can still use the dictionary syntax:
>>> a = Counter()
>>> a['test'] = 2
>>> print a['test']
2
This is very useful for counting things without initializing their counts,
see for example:
>>> a['blah'] += 1
>>> print a['blah']
1
The counter also includes additional functionality useful in implementing
the classifiers for this assignment. Two counters can be added,
subtracted or multiplied together. See below for details. They can
also be normalized and their total count and arg max can be extracted.
"""
def __getitem__(self, idx):
self.setdefault(idx, 0)
return dict.__getitem__(self, idx)
def incrementAll(self, keys, count):
"""
Increments all elements of keys by the same count.
>>> a = Counter()
>>> a.incrementAll(['one','two', 'three'], 1)
>>> a['one']
1
>>> a['two']
1
"""
for key in keys:
self[key] += count
def argMax(self):
"""
Returns the key with the highest value.
"""
if len(self.keys()) == 0: return None
all = self.items()
values = [x[1] for x in all]
maxIndex = values.index(max(values))
return all[maxIndex][0]
def sortedKeys(self):
"""
Returns a list of keys sorted by their values. Keys
with the highest values will appear first.
>>> a = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> a['third'] = 1
>>> a.sortedKeys()
['second', 'third', 'first']
"""
sortedItems = self.items()
compare = lambda x, y: sign(y[1] - x[1])
sortedItems.sort(cmp=compare)
return [x[0] for x in sortedItems]
def totalCount(self):
"""
Returns the sum of counts for all keys.
"""
return sum(self.values())
def normalize(self):
"""
Edits the counter such that the total count of all
keys sums to 1. The ratio of counts for all keys
will remain the same. Note that normalizing an empty
Counter will result in an error.
"""
total = float(self.totalCount())
if total == 0: return
for key in self.keys():
self[key] = self[key] / total
def divideAll(self, divisor):
"""
Divides all counts by divisor
"""
divisor = float(divisor)
for key in self:
self[key] /= divisor
def copy(self):
"""
Returns a copy of the counter
"""
return Counter(dict.copy(self))
def __mul__(self, y ):
"""
Multiplying two counters gives the dot product of their vectors where
each unique label is a vector element.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['second'] = 5
>>> a['third'] = 1.5
>>> a['fourth'] = 2.5
>>> a * b
14
"""
sum = 0
x = self
if len(x) > len(y):
x,y = y,x
for key in x:
if key not in y:
continue
sum += x[key] * y[key]
return sum
def __radd__(self, y):
"""
Adding another counter to a counter increments the current counter
by the values stored in the second counter.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> a += b
>>> a['first']
1
"""
for key, value in y.items():
self[key] += value
def __add__( self, y ):
"""
Adding two counters gives a counter with the union of all keys and
counts of the second added to counts of the first.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> (a + b)['first']
1
"""
addend = Counter()
for key in self:
if key in y:
addend[key] = self[key] + y[key]
else:
addend[key] = self[key]
for key in y:
if key in self:
continue
addend[key] = y[key]
return addend
def __sub__( self, y ):
"""
Subtracting a counter from another gives a counter with the union of all keys and
counts of the second subtracted from counts of the first.
>>> a = Counter()
>>> b = Counter()
>>> a['first'] = -2
>>> a['second'] = 4
>>> b['first'] = 3
>>> b['third'] = 1
>>> (a - b)['first']
-5
"""
addend = Counter()
for key in self:
if key in y:
addend[key] = self[key] - y[key]
else:
addend[key] = self[key]
for key in y:
if key in self:
continue
addend[key] = -1 * y[key]
return addend
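if __name__ == '__main__':
    # Minimal usage sketch (illustrative only; the keys and counts below are made
    # up). It exercises the Counter helpers described in the class docstring.
    _c = Counter()
    _c.incrementAll(['spam', 'spam', 'eggs'], 1)
    print _c['spam']        # 2
    print _c.totalCount()   # 3
    print _c.argMax()       # 'spam'
    _c.normalize()
    print _c['eggs']        # roughly 0.333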
def raiseNotDefined():
print "Method not implemented: %s" % inspect.stack()[1][3]
sys.exit(1)
def normalize(vectorOrCounter):
"""
normalize a vector or counter by dividing each value by the sum of all values
"""
normalizedCounter = Counter()
if type(vectorOrCounter) == type(normalizedCounter):
counter = vectorOrCounter
total = float(counter.totalCount())
if total == 0: return counter
for key in counter.keys():
value = counter[key]
normalizedCounter[key] = value / total
return normalizedCounter
else:
vector = vectorOrCounter
s = float(sum(vector))
if s == 0: return vector
return [el / s for el in vector]
def nSample(distribution, values, n):
if sum(distribution) != 1:
distribution = normalize(distribution)
rand = [random.random() for i in range(n)]
rand.sort()
samples = []
samplePos, distPos, cdf = 0,0, distribution[0]
while samplePos < n:
if rand[samplePos] < cdf:
samplePos += 1
samples.append(values[distPos])
else:
distPos += 1
cdf += distribution[distPos]
return samples
def sample(distribution, values = None):
if type(distribution) == Counter:
items = distribution.items()
distribution = [i[1] for i in items]
values = [i[0] for i in items]
if sum(distribution) != 1:
distribution = normalize(distribution)
choice = random.random()
i, total= 0, distribution[0]
while choice > total:
i += 1
total += distribution[i]
return values[i]
def sampleFromCounter(ctr):
items = ctr.items()
return sample([v for k,v in items], [k for k,v in items])
def getProbability(value, distribution, values):
"""
Gives the probability of a value under a discrete distribution
defined by (distributions, values).
"""
total = 0.0
for prob, val in zip(distribution, values):
if val == value:
total += prob
return total
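if __name__ == '__main__':
    # Minimal usage sketch (illustrative only; the distribution and values are
    # made up). It shows normalize(), sample() and getProbability() working on
    # a small discrete distribution.
    _dist = normalize([2.0, 1.0, 1.0])           # [0.5, 0.25, 0.25]
    _vals = ['a', 'b', 'c']
    print sample(_dist, _vals)                   # random draw; 'a' about half the time
    print getProbability('b', _dist, _vals)      # 0.25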
def flipCoin( p ):
r = random.random()
return r < p
def chooseFromDistribution( distribution ):
"Takes either a counter or a list of (prob, key) pairs and samples"
if type(distribution) == dict or type(distribution) == Counter:
return sample(distribution)
r = random.random()
base = 0.0
for prob, element in distribution:
base += prob
if r <= base: return element
def nearestPoint( pos ):
"""
Finds the nearest grid point to a position (discretizes).
"""
( current_row, current_col ) = pos
grid_row = int( current_row + 0.5 )
grid_col = int( current_col + 0.5 )
return ( grid_row, grid_col )
def sign( x ):
"""
Returns 1 or -1 depending on the sign of x
"""
if( x >= 0 ):
return 1
else:
return -1
def arrayInvert(array):
"""
Inverts a matrix stored as a list of lists.
"""
result = [[] for i in array]
for outer in array:
for inner in range(len(outer)):
result[inner].append(outer[inner])
return result
def matrixAsList( matrix, value = True ):
"""
Turns a matrix into a list of coordinates matching the specified value
"""
rows, cols = len( matrix ), len( matrix[0] )
cells = []
for row in range( rows ):
for col in range( cols ):
if matrix[row][col] == value:
cells.append( ( row, col ) )
return cells
def lookup(name, namespace):
"""
Get a method or class from any imported module from its name.
Usage: lookup(functionName, globals())
"""
dots = name.count('.')
if dots > 0:
moduleName, objName = '.'.join(name.split('.')[:-1]), name.split('.')[-1]
module = __import__(moduleName)
return getattr(module, objName)
else:
modules = [obj for obj in namespace.values() if str(type(obj)) == "<type 'module'>"]
options = [getattr(module, name) for module in modules if name in dir(module)]
options += [obj[1] for obj in namespace.items() if obj[0] == name ]
if len(options) == 1: return options[0]
if len(options) > 1: raise Exception, 'Name conflict for %s' % name
raise Exception, '%s not found as a method or class' % name
def pause():
"""
Pauses the output stream awaiting user feedback.
"""
print "<Press enter/return to continue>"
raw_input()
## code to handle timeouts
import signal
class TimeoutFunctionException(Exception):
"""Exception to raise on a timeout"""
pass
class TimeoutFunction:
def __init__(self, function, timeout):
"timeout must be at least 1 second. WHY??"
self.timeout = timeout
self.function = function
def handle_timeout(self, signum, frame):
raise TimeoutFunctionException()
def __call__(self, *args):
old = signal.signal(signal.SIGALRM, self.handle_timeout)
signal.alarm(self.timeout)
try:
result = self.function(*args)
finally:
signal.signal(signal.SIGALRM, old)
signal.alarm(0)
return result
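if __name__ == '__main__':
    # Minimal usage sketch (illustrative only; _slowCall and the 1-second timeout
    # are made up). TimeoutFunction arms signal.SIGALRM, so this only works on
    # Unix and in the main thread.
    import time
    def _slowCall():
        time.sleep(2)
        return "finished"
    try:
        print TimeoutFunction(_slowCall, 1)()
    except TimeoutFunctionException:
        print "timed out after 1 second"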
| 26.541152
| 88
| 0.616327
|
b275b08fc8c6fe2473736e7513a13d8b1801c7e0
| 2,347
|
py
|
Python
|
contacts/views.py
|
Gerard-007/musicalpacks
|
ef12c7281b395268ac53247fd34c3499f7a0569a
|
[
"MIT"
] | null | null | null |
contacts/views.py
|
Gerard-007/musicalpacks
|
ef12c7281b395268ac53247fd34c3499f7a0569a
|
[
"MIT"
] | 7
|
2020-06-05T20:23:21.000Z
|
2022-03-11T23:45:33.000Z
|
contacts/views.py
|
Gerard-007/musicalpacks
|
ef12c7281b395268ac53247fd34c3499f7a0569a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404, redirect, render
from django.http import HttpResponseRedirect, Http404
from .forms import contactForm
from django.conf import settings
from django.core.mail import send_mail
#Create your views here.
def contact(request):
title = 'Send us a message'
contact_form = contactForm(request.POST or None)
confirm_message = None
if contact_form.is_valid():
comment = contact_form.cleaned_data['comment']
name = contact_form.cleaned_data['name']
phone = contact_form.cleaned_data['phone']
subject = 'Contact email received from musicadence.com'
message = 'name: {} \n message: {} \n mobile: {}'.format(name, comment, phone)
from_email = contact_form.cleaned_data['email']
recipient_list = [settings.EMAIL_HOST_USER]
send_mail(subject, message, from_email, recipient_list, fail_silently=False)
title = 'Thanks'
confirm_message = 'Dear {}, your message was sent successfully'.format(name)
contact_form = None
context = {'title': title, 'contact_form': contact_form, 'confirm_message': confirm_message}
template = 'contact.html'
return render(request, template, context)
# def contact(request):
# if request.method == "POST":
# name = request.POST.get("name")
# email = request.POST.get("email")
# message = request.POST.get("message")
#
# # Email ourselves the submitted contact message
# subject = 'Message from musicadence.com'
# emailFrom = settings.DEFAULT_FROM_EMAIL
# emailTo = [settings.DEFAULT_FROM_EMAIL]
#
# # Option 1
# # contact_message = "{0}, from {1} with email {2}".format(comment, name, email)
#
# # Option 2
# context = {
# 'user': name,
# 'email': email,
# 'message': message
# }
#
# contact_message = get_template('contact_message.txt').render(context)
#
# send_mail(
# subject,
# contact_message,
# emailFrom,
# emailTo,
# fail_silently=False
# )
#
# return redirect("/contact")
# return render(request, "contact.html", {})
| 34.514706
| 96
| 0.617384
|
5187e64ed0b1eaf0685f03bda418aed255eb7443
| 4,513
|
py
|
Python
|
tests/Simple/Sup/multi_plot_loss.py
|
maxiaoba/rlk
|
3e23473f6bbc59552b6b2bcd97245e024d7ca95d
|
[
"MIT"
] | 1
|
2021-09-28T21:16:54.000Z
|
2021-09-28T21:16:54.000Z
|
tests/Simple/Sup/multi_plot_loss.py
|
maxiaoba/rlkit
|
3e23473f6bbc59552b6b2bcd97245e024d7ca95d
|
[
"MIT"
] | null | null | null |
tests/Simple/Sup/multi_plot_loss.py
|
maxiaoba/rlkit
|
3e23473f6bbc59552b6b2bcd97245e024d7ca95d
|
[
"MIT"
] | null | null | null |
import csv
import os.path
import matplotlib
matplotlib.rcParams.update({'font.size': 13})
from matplotlib import pyplot as plt
import numpy as np
itr_interval = 100
max_itr = 2e4
fields = [
'evaluation/Average Returns',
# 'evaluation/Actions Max',
# 'evaluation/Actions Min',
# 'exploration/Average Returns',
# 'exploration/Returns Max',
# 'exploration/Returns Min',
'trainer/SUP LossAfter',
# 'trainer/LossBefore',
# 'trainer/LossAfter',
'trainer/KLBefore',
'trainer/KL'
]
field_names = [
'Eval Average Return',
# 'Eval Action Max',
# 'Eval Action Min',
# 'Expl Average Return',
# 'Expl Max Return',
# 'Expl Min Return',
'Sup LossAfter',
# 'LossBefore',
# 'LossAfter',
'KLBefore',
'KL',
]
itr_name = 'epoch'
min_loss = [-1000]*100
max_loss = [1000]*100
exp_name = "SimpleSupobs10"
prepath = "./Data/"+exp_name
plot_path = "./Data/"+exp_name
policies = [
'PPOhidden24',
'PPOSuphidden24',
'PPOSupVanillahidden24',
'PPOSupOnlinehidden24',
'PPOSupSep2hidden16',
'Suphidden24',
]
policy_names = policies
# policy_names = [
# 'PPO',
# 'PPO + Shared Supervised Learning',
# 'PPO + Separated Supervised Learning',
# 'Supervised Learning'
# ]
seeds = [0,1,2]
colors = []
for pid in range(len(policies)):
colors.append('C'+str(pid))
extra_name = 'ppo'
pre_name = ''
post_name = ''
plot_names = []
for fid,field in enumerate(fields):
print(field)
fig = plt.figure(fid)
legends = []
plts = []
plot_names.append(extra_name+field_names[fid])
for (policy_index,policy) in enumerate(policies):
policy_path = pre_name+policy+post_name
Itrs = []
Losses = []
min_itr = np.inf
for trial in seeds:
file_path = prepath+'/'+policy_path+'/'+'seed'+str(trial)+'/progress.csv'
print(file_path)
if os.path.exists(file_path):
print(policy+'_'+str(trial))
itrs = []
losses = []
loss = []
with open(file_path) as csv_file:
if '\0' in open(file_path).read():
print("you have null bytes in your input file")
csv_reader = csv.reader(x.replace('\0', '') for x in csv_file)
else:
csv_reader = csv.reader(csv_file, delimiter=',')
for (i,row) in enumerate(csv_reader):
if i == 0:
entry_dict = {}
for index in range(len(row)):
entry_dict[row[index]] = index
# print(entry_dict)
else:
itr = i-1#int(float(row[entry_dict[itr_name]]))
if itr > max_itr:
break
try:
loss.append(np.clip(float(row[entry_dict[field]]),
min_loss[fid],max_loss[fid]))
except:
pass
if itr % itr_interval == 0:
itrs.append(itr)
loss = np.mean(loss)
losses.append(loss)
loss = []
if len(losses) < min_itr:
min_itr = len(losses)
Losses.append(losses)
Losses = [losses[:min_itr] for losses in Losses]
itrs = itrs[:min_itr]
Losses = np.array(Losses)
print(Losses.shape)
y = np.mean(Losses,0)
yerr = np.std(Losses,0)
plot, = plt.plot(itrs,y,colors[policy_index])
plt.fill_between(itrs,y+yerr,y-yerr,linewidth=0,
facecolor=colors[policy_index],alpha=0.3)
plts.append(plot)
legends.append(policy_names[policy_index])
plt.legend(plts,legends,loc='best')
plt.xlabel('Itr')
plt.ylabel(field_names[fid])
fig.savefig(plot_path+'/'+plot_names[fid]+'.pdf')
plt.close(fig)
| 32.702899
| 86
| 0.474186
|
2720623617e3eb6fb9e2a5b3f6f51e9f469880fa
| 23,022
|
py
|
Python
|
ideScripts/updateMakefile.py
|
poshcoe/VS-Code-STM32-IDE
|
d5509876c0642c9b1fbf53363d6640940b2a7084
|
[
"MIT"
] | 2
|
2020-06-06T16:15:57.000Z
|
2020-08-14T13:39:46.000Z
|
ideScripts/updateMakefile.py
|
poshcoe/VS-Code-STM32-IDE
|
d5509876c0642c9b1fbf53363d6640940b2a7084
|
[
"MIT"
] | 2
|
2020-06-06T16:18:43.000Z
|
2020-06-06T16:19:55.000Z
|
ideScripts/updateMakefile.py
|
poshcoe/VS-Code-STM32-IDE
|
d5509876c0642c9b1fbf53363d6640940b2a7084
|
[
"MIT"
] | null | null | null |
'''
Generate (replace existing) Makefile file in workspace folder with data from
original Makefile and 'c_cpp_properties.json'.
'''
import os
import datetime
from subprocess import Popen, PIPE
import utilities as utils
import templateStrings as tmpStr
import updatePaths as pth
import updateWorkspaceSources as wks
import updateBuildData as build
__version__ = utils.__version__
class MakefileStrings():
projectName = 'TARGET'
buildDir = 'BUILD_DIR'
cSources = 'C_SOURCES'
asmSources = 'ASM_SOURCES'
ldSources = 'LIBS'
cDefines = 'C_DEFS'
asmDefines = 'AS_DEFS'
cIncludes = 'C_INCLUDES'
asmIncludes = 'AS_INCLUDES'
ldIncludes = 'LIBDIR'
cFlags = 'CFLAGS'
asmFlags = 'ASFLAGS'
ldFlags = 'LDFLAGS'
class Makefile():
def __init__(self):
self.mkfStr = MakefileStrings()
self.cPStr = wks.CPropertiesStrings()
def checkMakefileFile(self):
'''
Check if 'Makefile' file exists. If it doesn't, report as error.
'''
if not utils.pathExists(utils.makefilePath):
errorMsg = "Makefile does not exist! Did CubeMX generated Makefile?\n"
errorMsg += "File name must be 'Makefile'."
utils.printAndQuit(errorMsg)
def restoreOriginalMakefile(self):
'''
Check whether the current 'Makefile' has print capabilities. If it does, it was already altered by this script.
If it was, replace it with backup copy: 'Makefile.backup'.
If it does not have print capabilities, it is assumed 'Makefile' was regenerated with CubeMX
tool - print function is added and backup file is overwritten with this new 'Makefile'.
At the end, fresh 'Makefile' with print function should be available.
'''
if utils.pathExists(utils.makefileBackupPath):
# Makefile.backup exists, check if it is original (no print capabilities)
if self.hasPrintCapabilities(pathToMakefile=utils.makefileBackupPath):
errorMsg = "Makefile.backup exist, but looks like it was already modified!\n"
errorMsg += "Did you manually delete, replace or modify any of Makefiles? "
errorMsg += "Delete all Makefiles and regenerate with CubeMX."
utils.printAndQuit(errorMsg)
else: # OK - seems like original Makefile, replace Makefile with Makefile.backup, add print capabilities
print("Existing 'Makefile' file found (restored from '.backup').")
utils.copyAndRename(utils.makefileBackupPath, utils.makefilePath)
else: # Makefile.backup does not exist, check if current Makefile has print capabilities.
if self.hasPrintCapabilities(pathToMakefile=utils.makefilePath):
errorMsg = "Looks like Makefile was already modified! Makefile.backup does not exist.\n"
errorMsg += "Did you manually delete, replace or modify any of Makefiles? "
errorMsg += "Delete all Makefiles and regenerate with CubeMX."
utils.printAndQuit(errorMsg)
else: # Makefile looks like an original one. Create a backup copy and add print capabilities
print("Existing 'Makefile' file found (original).")
utils.copyAndRename(utils.makefilePath, utils.makefileBackupPath)
self.addMakefileCustomFunctions(pathToMakefile=utils.makefilePath)
def getMakefileData(self, makeExePath, gccExePath):
'''
Get Makefile data.
Returns data in dictionary.
'''
dataDictionaryList = {}
# project name
projectName = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.projectName)[0]
dataDictionaryList[self.mkfStr.projectName] = projectName
# dir name
buildDirName = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.buildDir)[0]
dataDictionaryList[self.mkfStr.buildDir] = buildDirName
# source files
cSourcesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.cSources)
dataDictionaryList[self.mkfStr.cSources] = cSourcesList
asmSourcesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.asmSources)
dataDictionaryList[self.mkfStr.asmSources] = asmSourcesList
ldSourcesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.ldSources)
# ldSourcesList = utils.stripStartOfString(ldSourcesList, '-l') # more readable without stripping
dataDictionaryList[self.mkfStr.ldSources] = ldSourcesList
# defines
asmDefinesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.asmDefines)
asmDefinesList = utils.stripStartOfString(asmDefinesList, '-D')
dataDictionaryList[self.mkfStr.asmDefines] = asmDefinesList
cDefinesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.cDefines)
cDefinesList = utils.stripStartOfString(cDefinesList, '-D')
dataDictionaryList[self.mkfStr.cDefines] = cDefinesList
# source & include directories
asmIncludesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.asmIncludes)
asmIncludesList = utils.stripStartOfString(asmIncludesList, '-I')
dataDictionaryList[self.mkfStr.asmIncludes] = asmIncludesList
cIncludesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.cIncludes)
cIncludesList = utils.stripStartOfString(cIncludesList, '-I')
dataDictionaryList[self.mkfStr.cIncludes] = cIncludesList
ldIncludesList = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.ldIncludes)
ldIncludesList = utils.stripStartOfString(ldIncludesList, '-L')
dataDictionaryList[self.mkfStr.ldIncludes] = ldIncludesList
# flags
cFlags = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.cFlags)
dataDictionaryList[self.mkfStr.cFlags] = cFlags
asmFlags = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.asmFlags)
dataDictionaryList[self.mkfStr.asmFlags] = asmFlags
ldFlags = self.getMakefileVariable(makeExePath, gccExePath, self.mkfStr.ldFlags)
dataDictionaryList[self.mkfStr.ldFlags] = ldFlags
return dataDictionaryList
def parseMakefileData(self, data, startString):
'''
Fetch and parse data from the existing Makefile (generated by CubeMX), starting with 'startString'.
'''
endOfLineChars = "\\"
startString = startString + ' = '
NOT_FOUND = -1
items = []
# find the line where 'startString' begins and collect its value(s)
for lineIndex, line in enumerate(data):
line = line.rstrip('\n') # strip string of '\n'
startCharacter = line.find(startString)
if startCharacter != NOT_FOUND: # search for start string
# check if one-liner
if line.find(endOfLineChars) == NOT_FOUND:
line = line[len(startString):]
if len(line) != 0: # check for 'SOMETHING = ' (empty line after '=')
# not an empty line after '='
items.append(line) # strip string of start and end characters
return items
else: # multiline item in Makefile
for line2 in data[lineIndex+1:]:
line2 = line2.rstrip('\n')
if line2.find(endOfLineChars) != NOT_FOUND:
line2 = line2.rstrip('\\') # strip of '\'
line2 = line2.rstrip(' ') # strip of ' '
items.append(line2)
else:
line2 = line2.rstrip('\\') # strip of '\'
line2 = line2.rstrip(' ') # strip of ' '
items.append(line2)
return items
errorMsg = "String item '" + str(startString) + "' not found!\n"
errorMsg += "Invalid/changed Makefile or this script is outdated (change in CubeMX Makefile syntax?)."
utils.printAndQuit(errorMsg)
def createNewMakefile(self):
'''
Merge existing Makefile data and user fields from the existing 'c_cpp_properties.json'.
'''
cP = wks.CProperties()
cPropertiesData = cP.getCPropertiesData()
with open(utils.makefilePath, 'r') as makefile:
data = makefile.readlines()
# sources
cSources = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_cSources)
data = self.searchAndAppend(data, self.mkfStr.cSources, cSources)
asmSources = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_asmSources)
data = self.searchAndAppend(data, self.mkfStr.asmSources, asmSources)
ldSources = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_ldSources)
data = self.searchAndAppend(data, self.mkfStr.ldSources, ldSources, preappend='-l:')
# includes
cIncludes = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_cIncludes)
data = self.searchAndAppend(data, self.mkfStr.cIncludes, cIncludes, preappend='-I')
asmIncludes = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_asmIncludes)
data = self.searchAndAppend(data, self.mkfStr.asmIncludes, asmIncludes, preappend='-I')
ldIncludes = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_ldIncludes)
data = self.searchAndAppend(data, self.mkfStr.ldIncludes, ldIncludes, preappend='-L')
# defines
cDefines = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_cDefines)
data = self.searchAndAppend(data, self.mkfStr.cDefines, cDefines, preappend='-D')
asmDefines = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_asmDefines)
data = self.searchAndAppend(data, self.mkfStr.asmDefines, asmDefines, preappend='-D')
# compiler flags
cFlags = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_cFlags)
data = self.searchAndAppend(data, self.mkfStr.cFlags, cFlags)
asmFlags = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_asmFlags)
data = self.searchAndAppend(data, self.mkfStr.asmFlags, asmFlags)
ldFlags = cP.getCPropertiesKeyData(cPropertiesData, self.cPStr.user_ldFlags)
data = self.searchAndAppend(data, self.mkfStr.ldFlags, ldFlags)
data = self.replaceMakefileHeader(data)
try:
with open(utils.makefilePath, 'w') as makefile:
for line in data:
makefile.write(line)
print("New Makefile data succesfully written.")
except Exception as err:
errorMsg = "Exception error writing new data to Makefile:\n"
errorMsg += str(err)
utils.printAndQuit(errorMsg)
def searchAndAppend(self, data, searchString, appendData, preappend=None):
'''
Search for string in 'data' list and append 'appendData' according to Makefile syntax.
If 'preappend' is defined, each item of 'appendData' is prefixed with this string.
'''
NOT_FOUND = -1
if preappend is not None:
appendData = utils.preappendString(appendData, preappend)
for lineIndex, line in enumerate(data):
line = line.rstrip('\n') # strip string of '\n'
if line.find(searchString) != NOT_FOUND: # search for start string
if line[0] == '#': # this is a comment
continue
if line.find("\\") == NOT_FOUND:
# one-liner, no '\' sign at the end of the line
if isinstance(appendData, list): # if this is list
if appendData: # and it is not empty
if len(appendData) == 1: # this list has only one item, add it without '\'
if line[-1] != ' ': # avoid double spaces
line += " "
data[lineIndex] = line + appendData[0] + "\n"
else:
# this is list with multiple items, '\' will be needed
line += " \\\n"
data[lineIndex] = line
for itemIndex, item in enumerate(appendData):
stringToInsert = item
if item != appendData[-1]: # for last item do not append "\"
stringToInsert += "\\"
stringToInsert += "\n" # new line must always be added
data.insert(lineIndex + itemIndex + 1, stringToInsert)
return data
else: # appendData is string (not list)
if appendData != '':
if data[lineIndex][-1] != ' ': # avoid double spaces
data[lineIndex] += " "
data[lineIndex] += appendData + "\n"
return data
else: # already a multi-liner, append at the beginning, but in new line
if isinstance(appendData, list):
for itemIndex, item in enumerate(appendData):
stringToInsert = item + " \\\n"
data.insert(lineIndex + itemIndex + 1, stringToInsert)
else: # appendData is string (not list)
data[lineIndex] += appendData + " \\\n"
return data
errorMsg = "String item " + str(searchString) + " not found!"
utils.printAndQuit(errorMsg)
def searchAndCleanData(self, data, searchString):
'''
Search for a string in the 'data' list and clear all of its associated data according to Makefile syntax.
'''
NOT_FOUND = -1
for lineIndex, line in enumerate(data):
line = line.rstrip('\n') # strip string of '\n'
if line.find(searchString) != NOT_FOUND: # search for start string
if line[0] == '#': # this is a comment
continue
if line.find("\\") == NOT_FOUND:
# keep searchString and equality sign, append '\n'
equalitySignCharIndex = line.find('=')
data[lineIndex] = data[lineIndex][: equalitySignCharIndex+1] + ' \n'
return data
else: # multi-liner, get last line index and delete these lines
lastLineIndex = lineIndex + 1
while data[lastLineIndex].rstrip('\n') != '':
lastLineIndex = lastLineIndex + 1
if lastLineIndex >= len(data):
errorMsg = "Unable to find end of multi-line Makefile item (" + searchString + "). "
errorMsg += "Was Makefile manually modified?"
utils.printAndQuit(errorMsg)
# delete these lines
delLineIndex = lineIndex + 1
constLineIndex = lineIndex + 1 # this line will be deleted until an empty line is present
while delLineIndex != lastLineIndex:
del data[constLineIndex]
delLineIndex = delLineIndex + 1
# keep searchString and equality sign, append '\n'
equalitySignCharIndex = line.find('=')
data[lineIndex] = line[: equalitySignCharIndex+1] + ' \n'
return data
errorMsg = "String item " + str(searchString) + " not found!"
utils.printAndQuit(errorMsg)
########################################################################################################################
def getMakefileVariable(self, makeExePath, gccExePath, variableName):
'''
Open a subprocess, call 'make print-variableName' and capture stdout.
Syntax with absolute paths:
"path to make.exe with spaces" GCC_PATH="path to gccsomething.exe with spaces" print-VARIABLE
'''
# change directory to the same folder as Makefile
cwd = os.getcwd()
os.chdir(utils.workspacePath)
printStatement = "print-" + str(variableName)
gccExeFolderPath = os.path.dirname(gccExePath)
#gccPath = "\"\"GCC_PATH=" + gccExeFolderPath
gccPath = "GCC_PATH=\"" + gccExeFolderPath + "\""
arguments = [makeExePath, gccPath, printStatement]
proc = Popen(arguments, stdout=PIPE)
returnString = str((proc.communicate()[0]).decode('UTF-8'))
returnString = returnString.rstrip('\n')
returnString = returnString.rstrip('\r')
os.chdir(cwd) # change directory back to where it was
if returnString.find("make: *** No rule to make target") != -1:
errorMsg = "Can't retrieve " + variableName + " value from makefile."
utils.printAndQuit(errorMsg)
# remove "VARIABLE=" string start. This string must be present, or 'Echo is off.' is displayed for empy variables.
if returnString.find(tmpStr.printMakefileDefaultString) != -1:
returnString = returnString.replace(tmpStr.printMakefileDefaultString, '')
returnStringList = returnString.split(' ') # split string to list and remove empty items
returnStringListCopy = []
for itemIndex, item in enumerate(returnStringList):
# handle strings where print statement (print-variableName) is present, like '-MF"print-VARIABLE"'
quotedPrintStatement = "\"" + printStatement + "\""
if item.find(quotedPrintStatement) != -1:
item = item.replace(quotedPrintStatement, '')
elif item.find(printStatement) != -1:
item = item.replace(printStatement, '')
# handle empty items
if item not in ['', ' ']:
returnStringListCopy.append(item)
return returnStringListCopy
def replaceMakefileHeader(self, data):
'''
Change header, to distinguish between original and new Makefile.
'''
# first find last line before '# target', that must not be changed
lastLine = None
for lineIndex, line in enumerate(data):
twoLinesAhead = data[lineIndex + 2] # first line is ######... and second should be '# target'
twoLinesAhead = twoLinesAhead.rstrip('\n') # strip string of '\n'
if twoLinesAhead.find("# target") != -1: # search for start string
lastLine = lineIndex
break
if lastLine is None:
print('') # previously there was no new line
errorMsg = "Makefile '# target' string missing.\n"
errorMsg += "Invalid/changed Makefile or this script is outdated (change in CubeMX Makefile syntax?)."
utils.printAndQuit(errorMsg)
else: # '# target' line found
# delete current header
lineIndex = 0
while lineIndex != lastLine:
lineIndex = lineIndex + 1
del data[0]
# add new header
for line in reversed(tmpStr.makefileHeader.splitlines()):
if line.find(tmpStr.versionString) != -1:
line = line.replace('***', __version__)
if line.find(tmpStr.lastRunString) != -1:
timestamp = datetime.datetime.now()
line = line.replace('***', str(timestamp))
line = line + "\n"
data.insert(0, line)
return data
def hasPrintCapabilities(self, pathToMakefile):
'''
Check whether the current Makefile has the 'print-variable' function.
Returns True or False.
'''
with open(pathToMakefile, 'r+') as makefile:
data = makefile.readlines()
# Try to find existing print function
for line in reversed(data):
line = line.rstrip('\n') # strip string of '\n'
if line.find(tmpStr.printMakefileVariableFunction) != -1:
# existing print function found!
return True
return False
def addMakefileCustomFunctions(self, pathToMakefile):
'''
Add all functions to makefile:
- print-variable
- clean-build-dir
This function is called only if current Makefile does not have 'print-variable' capabilities.
'''
with open(pathToMakefile, 'r+') as makefile:
makefileDataLines = makefile.readlines()
makefileDataLines = self.addPrintVariableFunction(makefileDataLines)
makefile.seek(0)
makefile.truncate()
for line in makefileDataLines:
makefile.write(line)
def addPrintVariableFunction(self, makefileDataLines):
'''
Add print Makefile variable capabilities to Makefile
'''
makefileDataLines.append("\n\n")
for line in tmpStr.printMakefileVariable.splitlines():
line = line + "\n"
makefileDataLines.append(line)
print("Makefile 'print-variable' function added.")
return makefileDataLines
########################################################################################################################
if __name__ == "__main__":
utils.verifyFolderStructure()
paths = pth.UpdatePaths()
bData = build.BuildData()
cP = wks.CProperties()
makefile = Makefile()
# Makefile must exist
makefile.checkMakefileFile() # no point in continuing if Makefile does not exist
buildData = bData.prepareBuildData()
makefile.restoreOriginalMakefile()
makeExePath = buildData[bData.bStr.buildToolsPath]
gccExePath = buildData[bData.bStr.gccExePath]
makefileData = makefile.getMakefileData(makeExePath, gccExePath)
buildData = bData.addMakefileDataToBuildDataFile(buildData, makefileData)
# get data from 'c_cpp_properties.json' and create new Makefile
cP.checkCPropertiesFile()
makefile.createNewMakefile() # reads 'c_cpp_properties.json' internally
| 45.408284
| 125
| 0.584962
|
63cc6ca3652a89b7bfd3aef0c3722d49347ec2dc
| 751
|
py
|
Python
|
deployed-devices/rest/deployments/update-deployment/update-deployment.6.x.py
|
Tshisuaka/api-snippets
|
52b50037d4af0f3b96adf76197964725a1501e96
|
[
"MIT"
] | 234
|
2016-01-27T03:04:38.000Z
|
2022-02-25T20:13:43.000Z
|
deployed-devices/rest/deployments/update-deployment/update-deployment.6.x.py
|
Tshisuaka/api-snippets
|
52b50037d4af0f3b96adf76197964725a1501e96
|
[
"MIT"
] | 351
|
2016-04-06T16:55:33.000Z
|
2022-03-10T18:42:36.000Z
|
deployed-devices/rest/deployments/update-deployment/update-deployment.6.x.py
|
Tshisuaka/api-snippets
|
52b50037d4af0f3b96adf76197964725a1501e96
|
[
"MIT"
] | 494
|
2016-03-30T15:28:20.000Z
|
2022-03-28T19:39:36.000Z
|
# Get the Python helper library from https://twilio.com/docs/libraries/python
import os
from twilio.rest import Client
# Get your Account SID and Auth Token from https://twilio.com/console
# To set up environmental variables, see http://twil.io/secure
account_sid = os.environ['TWILIO_ACCOUNT_SID']
auth_token = os.environ['TWILIO_AUTH_TOKEN']
client = Client(account_sid, auth_token)
fleet_sid = 'FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
deployment_sid = 'DLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
deployment = client.preview\
.deployed_devices\
.fleets(fleet_sid)\
.deployments(sid=deployment_sid)\
.update(friendly_name='My New Device Deployment')
print(deployment.friendly_name)
| 37.55
| 77
| 0.727031
|
b7f1f461936bdd9b99c2534e41e04ba3683a0e6e
| 21,606
|
py
|
Python
|
cogs/image.py
|
OpenRobot/JDBot
|
314d715eec7174a0199a20b516b7f5e0db9900be
|
[
"MIT"
] | 1
|
2022-03-30T16:50:07.000Z
|
2022-03-30T16:50:07.000Z
|
cogs/image.py
|
OpenRobot/JDBot
|
314d715eec7174a0199a20b516b7f5e0db9900be
|
[
"MIT"
] | null | null | null |
cogs/image.py
|
OpenRobot/JDBot
|
314d715eec7174a0199a20b516b7f5e0db9900be
|
[
"MIT"
] | null | null | null |
import discord
import sr_api
import asuna_api
import random
import io
import cairosvg
import functools
from discord.ext import commands
import utils
import jeyyapi
class Image(commands.Cog):
"A bunch of Image Manipulation and other related Image commands"
def __init__(self, bot):
self.bot = bot
@commands.command(brief="a command to slap someone", help="this sends a slap gif to the target user")
async def slap(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
asuna = asuna_api.Client(session=self.bot.session)
url = await asuna.get_gif("slap")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{person} slapped you! Ow...", icon_url=(person.display_avatar.url))
embed.set_image(url=url.url)
embed.set_footer(text="powered using the asuna.ga api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed DM'ing them... likely didn't make us able to dm them or they blocked us.")
@commands.command(brief="a command to look up foxes", help="this known as wholesome fox to the asuna api")
async def fox2(self, ctx):
asuna = asuna_api.Client(session=self.bot.session)
url = await asuna.get_gif("wholesome_foxes")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(
name=f"{ctx.author} requested a wholesome fox picture", icon_url=(ctx.author.display_avatar.url)
)
embed.set_image(url=url.url)
embed.set_footer(text="powered using the asuna.ga api")
await ctx.send(embed=embed)
@commands.command(brief="another command to give you pat gifs", help="powered using the asuna api")
async def pat2(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
asuna = asuna_api.Client(session=self.bot.session)
url = await asuna.get_gif("pat")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{person} patted you! *pat pat pat*", icon_url=(person.display_avatar.url))
embed.set_image(url=url.url)
embed.set_footer(text="powered using the asuna.ga api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed DM'ing them...")
@commands.command(brief="a command to give you pat gifs", help="using the sra api it gives you pat gifs")
async def pat(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
sr_client = sr_api.Client(session=self.bot.session)
image = await sr_client.get_gif("pat")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{person} patted you", icon_url=(person.display_avatar.url))
embed.set_image(url=image.url)
embed.set_footer(text="powered by some random api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed Dming them...")
@commands.command(brief="a hug command to hug people", help="this the first command to hug.")
async def hug(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
sr_client = sr_api.Client(session=self.bot.session)
image = await sr_client.get_gif("hug")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{person} hugged you! Awwww...", icon_url=(person.display_avatar.url))
embed.set_image(url=image.url)
embed.set_footer(text="powered by some random api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed DM'ing them...")
@commands.command(help="takes a .png attachment or your avatar and makes a triggered version.")
async def triggered(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
y = 0
embeds = []
if ctx.message.attachments:
for a in ctx.message.attachments:
if a.filename.endswith(".png"):
url = a.url
embeds.append(await utils.triggered_converter(url, ctx))
y += 1
if not a.filename.endswith(".png"):
pass
if not ctx.message.attachments or y == 0:
url = (Member.display_avatar.with_format("png")).url
embeds.append(await utils.triggered_converter(url, ctx))
menu = utils.Paginator(embeds, ctx=ctx, delete_after=True)
await menu.send()
@commands.command(
brief="uses our headpat program to pat you", help="a command that uses jeyyapi to make a headpat of you."
)
async def headpat2(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
y = 0
embeds = []
if ctx.message.attachments:
for a in ctx.message.attachments:
if a.filename.endswith(".png"):
url = a.url
embeds.append(await utils.headpat_converter(url, ctx))
y += 1
if not a.filename.endswith(".png"):
pass
if not ctx.message.attachments or y == 0:
url = (Member.display_avatar.with_format("png")).url
embeds.append(await utils.headpat_converter(url, ctx))
menu = utils.Paginator(embeds, ctx=ctx, delete_after=True)
await menu.send()
@commands.command(
brief="a hug command to hug people", help="this actually the second hug command and is quite powerful."
)
async def hug2(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
asuna = asuna_api.Client(session=self.bot.session)
url = await asuna.get_gif("hug")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{person} super hugged you!", icon_url=(person.display_avatar.url))
embed.set_image(url=url.url)
embed.set_footer(text="powered using the asuna.ga api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed DM'ing them...")
@commands.command(
brief="a kiss command",
help="a command where you can target a user or pick yourself to get a kiss gif( I don't know why I have this)",
)
async def kiss(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
asuna = asuna_api.Client(session=self.bot.session)
url = await asuna.get_gif("kiss")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{person} kissed you", icon_url=(person.display_avatar.url))
embed.set_image(url=url.url)
embed.set_footer(text="Why did I make this command? powered using the asuna.ga api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed Dming them...")
@commands.command(brief="a command to get a neko", help="using the asuna.ga api you will get these images")
async def neko(self, ctx):
asuna = asuna_api.Client(session=self.bot.session)
url = await asuna.get_gif("neko")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{ctx.author} requested a neko picture", icon_url=(ctx.author.display_avatar.url))
embed.set_image(url=url.url)
embed.set_footer(text="powered using the asuna.ga api")
await ctx.send(embed=embed)
@commands.command(
brief="a command to send wink gifs", wink="you select a user to send it to and it will send it to you lol"
)
async def wink(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
sr_client = sr_api.Client(session=self.bot.session)
image = await sr_client.get_gif("wink")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{person} winked at you", icon_url=(person.display_avatar.url))
embed.set_image(url=image.url)
embed.set_footer(text="powered by some random api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed Dming them...")
@commands.command(brief="Gives you a random waifu image.")
async def waifu(self, ctx):
r = await self.bot.session.get("https://api.waifu.pics/sfw/waifu")
res = await r.json()
embed = discord.Embed(color=random.randint(0, 16777215), timestamp=(ctx.message.created_at))
embed.set_author(name=f"{ctx.author} Requested A Waifu")
embed.set_image(url=res["url"])
embed.set_footer(text="Powered by waifu.pics")
await ctx.send(embed=embed)
@commands.command(brief="Gives you a random waifu image.")
async def waifu2(self, ctx):
r = await self.bot.session.get("https://api.waifu.im/random/?is_nsfw=false&many=false&full=false")
res = await r.json()
image = res["images"][0]
embed = discord.Embed(
color=random.randint(0, 16777215), timestamp=(ctx.message.created_at), url=image["preview_url"]
)
embed.set_author(name=f"{ctx.author} Requested A Waifu")
embed.set_image(url=image["url"])
embed.set_footer(text="Powered by waifu.im")
await ctx.send(embed=embed)
@commands.command(brief="Gives you a random bonk picture")
async def bonk(self, ctx):
r = await self.bot.session.get("https://api.waifu.pics/sfw/bonk")
res = await r.json()
embed = discord.Embed(color=random.randint(0, 16777215), timestamp=(ctx.message.created_at))
embed.set_author(name=f"{ctx.author} Requested A Bonk")
embed.set_image(url=res["url"])
embed.set_footer(text="Powered by waifu.pics")
await ctx.send(embed=embed)
@commands.command(
brief="a command to send facepalm gifs", help="using some random api it sends you a facepalm gif lol"
)
async def facepalm(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
if Member.id == ctx.author.id:
person = self.bot.user
target = ctx.author
if Member.id != ctx.author.id:
person = ctx.author
target = Member
sr_client = sr_api.Client(session=self.bot.session)
image = await sr_client.get_gif("face-palm")
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{target} you made {person} facepalm", icon_url=person.display_avatar.url)
embed.set_image(url=image.url)
embed.set_footer(text="powered by some random api")
if isinstance(ctx.channel, discord.TextChannel):
await ctx.send(content=target.mention, embed=embed)
if isinstance(ctx.channel, discord.DMChannel):
if target.dm_channel is None:
await target.create_dm()
try:
await target.send(content=target.mention, embed=embed)
except discord.Forbidden:
await ctx.author.send("Failed Dming them...")
@commands.command(help="gives a random objection", aliases=["obj", "ob", "object"])
async def objection(self, ctx):
r = await self.bot.session.get("https://api.senarc.org/misc/objection")
res = await r.json()
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_author(name=f"{ctx.author} yelled OBJECTION!", icon_url=(ctx.author.display_avatar.url))
embed.set_image(url=res["url"])
embed.set_footer(text="Powered By Senarc Api!")
await ctx.send(embed=embed)
@commands.command(help="gives the truth about opinions(may offend)", aliases=["opinion"])
async def opinional(self, ctx):
r = await self.bot.session.get("https://api.senarc.org/misc/opinional")
res = await r.json()
embed = discord.Embed(title="Truth about opinions(may offend some people):", color=random.randint(0, 16777215))
embed.set_image(url=res["url"])
embed.set_footer(text="Powered by Senarc Api!")
await ctx.send(embed=embed)
@commands.command(brief="a command to send I hate spam.")
async def spam(self, ctx):
embed = discord.Embed(color=random.randint(0, 16777215))
embed.set_image(url="https://i.imgur.com/1LckTTu.gif")
await ctx.send(content="I hate spam.", embed=embed)
@commands.command(brief="gives you the milkman gif", help="you summoned the milkman oh no")
async def milk(self, ctx):
embed = discord.Embed(title="You have summoned the milkman", color=random.randint(0, 16777215))
embed.set_image(url="https://i.imgur.com/JdyaI1Y.gif")
embed.set_footer(text="his milk is delicious")
await ctx.send(embed=embed)
@commands.command(help="inverts any valid image within the sr_api")
async def invert2(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
y = 0
embeds = []
if ctx.message.attachments:
for a in ctx.message.attachments:
if a.filename.endswith(".png") or a.filename.endswith(".jpg"):
url = a.url
embeds.append(await utils.invert_converter(url, ctx))
y += 1
if not a.filename.endswith(".png") or not a.filename.endswith(".jpg"):
pass
if not ctx.message.attachments or y == 0:
url = (Member.display_avatar.with_format("png")).url
embeds.append(await utils.invert_converter(url, ctx))
menu = utils.Paginator(embeds, ctx=ctx, delete_after=True)
await menu.send()
@commands.command(help="Headpat generator :D")
async def headpat(self, ctx, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
y = 0
embeds = []
if ctx.message.attachments:
for a in ctx.message.attachments:
if a.filename.endswith(".png") or a.filename.endswith(".jpg"):
url = a.proxy_url
embeds.append(await utils.headpat_converter2(url, ctx))
y += 1
if not a.filename.endswith(".png") or not a.filename.endswith(".jpg"):
pass
if not ctx.message.attachments or y == 0:
url = (Member.display_avatar.with_format("png")).url
embeds.append(await utils.headpat_converter2(url, ctx))
menu = utils.Paginator(embeds, ctx=ctx, delete_after=True)
await menu.send()
def convert_svg(self, svg_image):
converted_bytes = cairosvg.svg2png(bytestring=svg_image, scale=6.0)
buffer = io.BytesIO(converted_bytes)
buffer.seek(0)
return discord.File(buffer, filename=f"converted.png")
@commands.command(brief="Converts svg images to png images")
async def svgconvert(self, ctx):
if ctx.message.attachments:
for a in ctx.message.attachments:
try:
convert_time = functools.partial(self.convert_svg, await a.read())
file = await self.bot.loop.run_in_executor(None, convert_time)
await ctx.send(file=file)
except Exception as e:
await ctx.send(f"couldn't convert that :( due to error: {e}")
else:
await ctx.send("you need svg attachments")
@commands.command(brief="uses dagpi to make an image of you in jail")
async def jail(self, ctx, *, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
y = 0
embeds = []
if ctx.message.attachments:
for a in ctx.message.attachments:
if a.filename.endswith(".png"):
url = a.url
embeds.append(await utils.jail_converter(url, ctx))
y += 1
if not a.filename.endswith(".png"):
pass
if not ctx.message.attachments or y == 0:
url = (Member.display_avatar.with_format("png")).url
embeds.append(await utils.jail_converter(url, ctx))
menu = utils.Paginator(embeds, ctx=ctx, disable_after=True)
await menu.send()
@commands.command(brief="inverts any valid image with jeyyapi")
async def invert(self, ctx, Member: utils.BetterMemberConverter = None):
Member = Member or ctx.author
y = 0
embeds = []
if ctx.message.attachments:
for a in ctx.message.attachments:
if a.filename.endswith(".png") or a.filename.endswith(".jpg"):
url = a.url
embeds.append(await utils.invert_converter2(url, ctx))
y += 1
if not a.filename.endswith(".png") or not a.filename.endswith(".jpg"):
pass
if not ctx.message.attachments or y == 0:
url = (Member.display_avatar.with_format("png")).url
embeds.append(await utils.invert_converter2(url, ctx))
menu = utils.Paginator(embeds, ctx=ctx, delete_after=True)
await menu.send()
@commands.command(brief="Generates ace attronetry gifs")
async def ace(self, ctx):
jeyy_client = jeyyapi.JeyyAPIClient(session=self.bot.session)
view = utils.AceView(ctx, jeyy_client)
await ctx.send(content="Please Pick a side to represent:", view=view)
async def setup(bot):
await bot.add_cog(Image(bot))
| 39.863469
| 120
| 0.614922
|
81a15133d578f5a72d9c0ac46229218634f5da94
| 2,289
|
py
|
Python
|
checkers/passman/passman_lib.py
|
C4T-BuT-S4D/innoctf-teazer-01-03-2020
|
05b5b0cf8b4a7df9f03b616996372c7d68ccd2c3
|
[
"WTFPL"
] | null | null | null |
checkers/passman/passman_lib.py
|
C4T-BuT-S4D/innoctf-teazer-01-03-2020
|
05b5b0cf8b4a7df9f03b616996372c7d68ccd2c3
|
[
"WTFPL"
] | null | null | null |
checkers/passman/passman_lib.py
|
C4T-BuT-S4D/innoctf-teazer-01-03-2020
|
05b5b0cf8b4a7df9f03b616996372c7d68ccd2c3
|
[
"WTFPL"
] | null | null | null |
import re
import requests
from checklib import *
PORT = 9171
class CheckMachine:
def __init__(self, checker: BaseChecker):
self.c = checker
@property
def url(self):
return f'http://{self.c.host}:{PORT}'
def register(self, username=None, password=None):
username = username or rnd_username()
password = password or rnd_password()
r = requests.post(f'{self.url}/register/', data={'username': username, 'password': password})
self.c.check_response(r, 'Could not register')
self.c.assert_in('login', r.url, 'Invalid page after register')
return username, password
def login(self, username, password):
sess = get_initialized_session()
r = sess.post(f'{self.url}/login/', data={'username': username, 'password': password})
self.c.check_response(r, 'Could not login')
data = self.c.get_text(r, 'Could not login')
self.c.assert_in(username, data, 'Invalid page after login')
return sess
def list_users(self, sess):
r = sess.get(f'{self.url}/users/')
self.c.check_response(r, 'Could not list users')
return self.c.get_text(r, 'Could not list users')
def add_password(self, sess, password=None):
password = password or rnd_password()
r = sess.post(f'{self.url}/add_password/', data={'password': password})
self.c.check_response(r, 'Could not add password')
self.c.assert_in('users', r.url, 'Invalid page after password')
return password
def list_passwords(self, sess, username, password):
r = sess.get(f'{self.url}/users/{username}/')
self.c.check_response(r, 'Could not get user profile')
data = self.c.get_text(r, 'Could not get user profile')
self.c.assert_in(username, data, 'Could not get user profile')
r = sess.post(f'{self.url}/users/{username}/', data={'password': password})
self.c.check_response(r, 'Could not get user passwords')
data = self.c.get_text(r, 'Could not get user passwords')
self.c.assert_in(username, data, 'Could not get user passwords')
self.c.assert_in(password, data, 'Could not get user passwords')
return re.findall(r'<li class="list-group-item">(\S*)</li>', data)
| 38.15
| 101
| 0.639581
|
befe88d4c636e1a785bf52c9e73a185c4a7722ee
| 21,962
|
py
|
Python
|
tests/test_date.py
|
ASOdesk/dateparser
|
d8050511772c30199d14cd8506d46f9c587c61a8
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_date.py
|
ASOdesk/dateparser
|
d8050511772c30199d14cd8506d46f9c587c61a8
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_date.py
|
ASOdesk/dateparser
|
d8050511772c30199d14cd8506d46f9c587c61a8
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import regex as re
import unittest
from collections import OrderedDict
from datetime import datetime, timedelta
from mock import Mock, patch
from nose_parameterized import parameterized, param
import six
import dateparser
from dateparser import date
from dateparser.date import get_last_day_of_month
from dateparser.languages.loader import LanguageDataLoader
from dateparser.languages.loader import default_language_loader
from tests import BaseTestCase
class TestDateRangeFunction(BaseTestCase):
def setUp(self):
super(TestDateRangeFunction, self).setUp()
self.result = NotImplemented
@parameterized.expand([
param(begin=datetime(2014, 6, 15), end=datetime(2014, 6, 25), expected_length=10)
])
def test_date_range(self, begin, end, expected_length):
self.when_date_range_generated(begin, end)
self.then_range_length_is(expected_length)
self.then_all_dates_in_range_are_present(begin, end)
self.then_range_is_in_ascending_order()
@parameterized.expand([
param(begin=datetime(2014, 4, 15),
end=datetime(2014, 6, 25),
expected_months=[(2014, 4), (2014, 5), (2014, 6)]),
param(begin=datetime(2014, 4, 25),
end=datetime(2014, 5, 5),
expected_months=[(2014, 4), (2014, 5)]),
param(begin=datetime(2014, 4, 5),
end=datetime(2014, 4, 25),
expected_months=[(2014, 4)]),
param(begin=datetime(2014, 4, 25),
end=datetime(2014, 6, 5),
expected_months=[(2014, 4), (2014, 5), (2014, 6)]),
])
def test_one_date_for_each_month(self, begin, end, expected_months):
self.when_date_range_generated(begin, end, months=1)
self.then_expected_months_are(expected_months)
@parameterized.expand([
'year',
'month',
'week',
'day',
'hour',
'minute',
'second',
])
def test_should_reject_easily_mistaken_dateutil_arguments(self, invalid_period):
self.when_date_range_generated(begin=datetime(2014, 6, 15),
end=datetime(2014, 6, 25),
**{invalid_period: 1})
self.then_period_was_rejected(invalid_period)
def when_date_range_generated(self, begin, end, **size):
try:
self.result = list(date.date_range(begin, end, **size))
except Exception as error:
self.error = error
def then_expected_months_are(self, expected):
self.assertEqual(expected,
[(d.year, d.month) for d in self.result])
def then_range_length_is(self, expected_length):
self.assertEqual(expected_length, len(self.result))
def then_all_dates_in_range_are_present(self, begin, end):
date_under_test = begin
while date_under_test < end:
self.assertIn(date_under_test, self.result)
date_under_test += timedelta(days=1)
def then_range_is_in_ascending_order(self):
for i in six.moves.range(len(self.result) - 1):
self.assertLess(self.result[i], self.result[i + 1])
def then_period_was_rejected(self, period):
self.then_error_was_raised(ValueError, ['Invalid argument: {}'.format(period)])
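# Illustrative sketch (not part of the original suite), restating what the
# cases above assert about date.date_range:
#   list(date.date_range(datetime(2014, 6, 15), datetime(2014, 6, 25)))
#   # -> 10 ascending datetimes, 2014-06-15 through 2014-06-24 (end excluded)
#   list(date.date_range(datetime(2014, 4, 25), datetime(2014, 6, 5), months=1))
#   # -> one datetime for each month touched: April, May and June 2014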
class TestGetIntersectingPeriodsFunction(BaseTestCase):
def setUp(self):
super(TestGetIntersectingPeriodsFunction, self).setUp()
self.result = NotImplemented
@parameterized.expand([
param(low=datetime(2014, 6, 15), high=datetime(2014, 6, 16), length=1)
])
def test_date_arguments_and_date_range_with_default_post_days(self, low, high, length):
self.when_intersecting_period_calculated(low, high, period_size='day')
self.then_all_dates_in_range_are_present(begin=low, end=high)
self.then_date_range_length_is(length)
@parameterized.expand([
param(low=datetime(2014, 4, 15),
high=datetime(2014, 6, 25),
expected_results=[datetime(2014, 4, 1), datetime(2014, 5, 1), datetime(2014, 6, 1)]),
param(low=datetime(2014, 4, 25),
high=datetime(2014, 5, 5),
expected_results=[datetime(2014, 4, 1), datetime(2014, 5, 1)]),
param(low=datetime(2014, 4, 5),
high=datetime(2014, 4, 25),
expected_results=[datetime(2014, 4, 1)]),
param(low=datetime(2014, 4, 25),
high=datetime(2014, 6, 5),
expected_results=[datetime(2014, 4, 1), datetime(2014, 5, 1), datetime(2014, 6, 1)]),
param(low=datetime(2014, 4, 25),
high=datetime(2014, 4, 25),
expected_results=[]),
param(low=datetime(2014, 12, 31),
high=datetime(2015, 1, 1),
expected_results=[datetime(2014, 12, 1)]),
])
def test_dates_in_the_intersecting_period_should_use_first_day_when_period_is_month(
self, low, high, expected_results
):
self.when_intersecting_period_calculated(low, high, period_size='month')
self.then_results_are(expected_results)
@parameterized.expand([
param(low=datetime(2014, 4, 15),
high=datetime(2014, 5, 15),
period_size='month',
expected_results=[datetime(2014, 4, 1), datetime(2014, 5, 1)]),
param(low=datetime(2014, 10, 30, 4, 30),
high=datetime(2014, 11, 7, 5, 20),
period_size='week',
expected_results=[datetime(2014, 10, 27), datetime(2014, 11, 3)]),
param(low=datetime(2014, 8, 13, 13, 21),
high=datetime(2014, 8, 14, 14, 7),
period_size='day',
expected_results=[datetime(2014, 8, 13), datetime(2014, 8, 14)]),
param(low=datetime(2014, 5, 11, 22, 4),
high=datetime(2014, 5, 12, 0, 5),
period_size='hour',
expected_results=[datetime(2014, 5, 11, 22, 0),
datetime(2014, 5, 11, 23, 0),
datetime(2014, 5, 12, 0, 0)]),
param(low=datetime(2014, 4, 25, 11, 11, 11),
high=datetime(2014, 4, 25, 11, 12, 11),
period_size='minute',
expected_results=[datetime(2014, 4, 25, 11, 11, 0),
datetime(2014, 4, 25, 11, 12, 0)]),
param(low=datetime(2014, 12, 31, 23, 59, 58, 500),
high=datetime(2014, 12, 31, 23, 59, 59, 600),
period_size='second',
expected_results=[datetime(2014, 12, 31, 23, 59, 58, 0),
datetime(2014, 12, 31, 23, 59, 59, 0)]),
])
def test_periods(self, low, high, period_size, expected_results):
self.when_intersecting_period_calculated(low, high, period_size=period_size)
self.then_results_are(expected_results)
@parameterized.expand([
param('years'),
param('months'),
param('days'),
param('hours'),
param('minutes'),
param('seconds'),
param('microseconds'),
param('some_period'),
])
def test_should_reject_easily_mistaken_dateutil_arguments(self, period_size):
self.when_intersecting_period_calculated(low=datetime(2014, 6, 15),
high=datetime(2014, 6, 25),
period_size=period_size)
self.then_error_was_raised(ValueError, ['Invalid period: ' + str(period_size)])
@parameterized.expand([
param(low=datetime(2014, 4, 15), high=datetime(2014, 4, 14), period_size='month'),
param(low=datetime(2014, 4, 25), high=datetime(2014, 4, 25), period_size='month'),
])
def test_empty_period(self, low, high, period_size):
self.when_intersecting_period_calculated(low, high, period_size)
self.then_period_is_empty()
def when_intersecting_period_calculated(self, low, high, period_size):
try:
self.result = list(date.get_intersecting_periods(low, high, period=period_size))
except Exception as error:
self.error = error
def then_results_are(self, expected_results):
        self.assertEqual(expected_results, self.result)
def then_date_range_length_is(self, size):
        self.assertEqual(size, len(self.result))
def then_all_dates_in_range_are_present(self, begin, end):
date_under_test = begin
while date_under_test < end:
self.assertIn(date_under_test, self.result)
date_under_test += timedelta(days=1)
def then_period_is_empty(self):
        self.assertEqual([], self.result)
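# Illustrative sketch (not part of the original suite), mirroring the
# parameters above: get_intersecting_periods yields the start of every
# period that overlaps the given range, e.g.
#   list(date.get_intersecting_periods(datetime(2014, 4, 15),
#                                      datetime(2014, 6, 25), period='month'))
#   # -> [datetime(2014, 4, 1), datetime(2014, 5, 1), datetime(2014, 6, 1)]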
class TestParseWithFormatsFunction(BaseTestCase):
def setUp(self):
super(TestParseWithFormatsFunction, self).setUp()
self.result = NotImplemented
@parameterized.expand([
param(date_string='yesterday', date_formats=['%Y-%m-%d']),
])
def test_date_with_not_matching_format_is_not_parsed(self, date_string, date_formats):
self.when_date_is_parsed_with_formats(date_string, date_formats)
self.then_date_was_not_parsed()
@parameterized.expand([
param(date_string='25-03-14', date_formats=['%d-%m-%y'], expected_result=datetime(2014, 3, 25)),
])
def test_should_parse_date(self, date_string, date_formats, expected_result):
self.when_date_is_parsed_with_formats(date_string, date_formats)
self.then_date_was_parsed()
self.then_parsed_period_is('day')
self.then_parsed_date_is(expected_result)
@parameterized.expand([
param(date_string='09.16', date_formats=['%m.%d'], expected_month=9, expected_day=16),
])
def test_should_use_current_year_for_dates_without_year(
self, date_string, date_formats, expected_month, expected_day
):
self.given_now(2015, 2, 4)
self.when_date_is_parsed_with_formats(date_string, date_formats)
self.then_date_was_parsed()
self.then_parsed_period_is('day')
self.then_parsed_date_is(datetime(2015, expected_month, expected_day))
@parameterized.expand([
param(date_string='August 2014', date_formats=['%B %Y'], expected_year=2014, expected_month=8),
])
def test_should_use_last_day_of_month_for_dates_without_day(
self, date_string, date_formats, expected_year, expected_month
):
self.given_now(2014, 8, 12)
self.when_date_is_parsed_with_formats(date_string, date_formats)
self.then_date_was_parsed()
self.then_parsed_period_is('month')
self.then_parsed_date_is(datetime(year=expected_year,
month=expected_month,
day=get_last_day_of_month(expected_year, expected_month)))
def given_now(self, year, month, day, **time):
now = datetime(year, month, day, **time)
datetime_mock = Mock(wraps=datetime)
datetime_mock.utcnow = Mock(return_value=now)
datetime_mock.now = Mock(return_value=now)
datetime_mock.today = Mock(return_value=now)
self.add_patch(patch('dateparser.date_parser.datetime', new=datetime_mock))
self.add_patch(patch('dateparser.date.datetime', new=datetime_mock))
def when_date_is_parsed_with_formats(self, date_string, date_formats):
self.result = date.parse_with_formats(date_string, date_formats)
def then_date_was_not_parsed(self):
self.assertIsNotNone(self.result)
self.assertIsNone(self.result['date_obj'])
def then_date_was_parsed(self):
self.assertIsNotNone(self.result)
self.assertIsNotNone(self.result['date_obj'])
def then_parsed_date_is(self, date_obj):
        self.assertEqual(date_obj.date(), self.result['date_obj'].date())
def then_parsed_period_is(self, period):
        self.assertEqual(period, self.result['period'])
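# Illustrative sketch (not part of the original suite), restating the cases
# above: parse_with_formats returns a dict with 'date_obj' and 'period' keys.
#   date.parse_with_formats('25-03-14', ['%d-%m-%y'])
#   # -> {'date_obj': datetime(2014, 3, 25, ...), 'period': 'day'}
#   date.parse_with_formats('yesterday', ['%Y-%m-%d'])
#   # -> {'date_obj': None, ...}   (no format matched)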
class TestDateDataParser(BaseTestCase):
def setUp(self):
super(TestDateDataParser, self).setUp()
self.parser = NotImplemented
self.result = NotImplemented
self.multiple_results = NotImplemented
@parameterized.expand([
param('10:04am EDT'),
])
def test_time_without_date_should_use_today(self, date_string):
self.given_now(2020, 7, 19)
self.given_parser()
self.when_date_string_is_parsed(date_string)
self.then_date_was_parsed()
self.then_parsed_date_is(datetime(2020, 7, 19).date())
@parameterized.expand([
# Today
param('today', days_ago=0),
param('Today', days_ago=0),
param('TODAY', days_ago=0),
param('Сегодня', days_ago=0),
param('Hoje', days_ago=0),
param('Oggi', days_ago=0),
# Yesterday
param('yesterday', days_ago=1),
param(' Yesterday \n', days_ago=1),
param('Ontem', days_ago=1),
param('Ieri', days_ago=1),
# Day before yesterday
param('the day before yesterday', days_ago=2),
param('The DAY before Yesterday', days_ago=2),
param('Anteontem', days_ago=2),
param('Avant-hier', days_ago=2),
])
def test_temporal_nouns_are_parsed(self, date_string, days_ago):
self.given_parser()
self.when_date_string_is_parsed(date_string)
self.then_date_was_parsed()
self.then_date_is_n_days_ago(days=days_ago)
def test_should_not_assume_language_too_early(self):
dates_to_parse = OrderedDict([(u'07/07/2014', datetime(2014, 7, 7).date()), # any language
(u'07.jul.2014 | 12:52', datetime(2014, 7, 7).date()), # en, es, pt, nl
(u'07.ago.2014 | 12:52', datetime(2014, 8, 7).date()), # es, it, pt
(u'07.feb.2014 | 12:52', datetime(2014, 2, 7).date()), # en, de, es, it, nl, ro
(u'07.ene.2014 | 12:52', datetime(2014, 1, 7).date())]) # es
self.given_parser(restrict_to_languages=['en', 'de', 'fr', 'it', 'pt', 'nl', 'ro', 'es', 'ru'],
allow_redetect_language=False)
self.when_multiple_dates_are_parsed(dates_to_parse.keys())
self.then_all_results_were_parsed()
self.then_parsed_dates_are(list(dates_to_parse.values()))
def test_should_enable_redetection_for_multiple_languages(self):
dates_to_parse = OrderedDict([(u'13 Ago, 2014', datetime(2014, 8, 13).date()), # es, it, pt
(u'11 Marzo, 2014', datetime(2014, 3, 11).date()), # es, it
(u'13 Septiembre, 2014', datetime(2014, 9, 13).date()), # es
(u'13 Setembro, 2014', datetime(2014, 9, 13).date()), # pt
(u'13 Março, 2014', datetime(2014, 3, 13).date())]) # pt
self.given_parser(restrict_to_languages=['es', 'it', 'pt'], allow_redetect_language=True)
self.when_multiple_dates_are_parsed(dates_to_parse.keys())
self.then_all_results_were_parsed()
self.then_parsed_dates_are(list(dates_to_parse.values()))
@parameterized.expand([
param("2014-10-09T17:57:39+00:00"),
])
def test_get_date_data_should_not_strip_timezone_info(self, date_string):
self.given_parser()
self.when_date_string_is_parsed(date_string)
self.then_date_was_parsed()
self.then_parsed_date_has_timezone()
@parameterized.expand([
param(date_string="14 giu 13", date_formats=["%y %B %d"], expected_result=datetime(2014, 6, 13)),
param(date_string="14_luglio_15", date_formats=["%y_%B_%d"], expected_result=datetime(2014, 7, 15)),
param(date_string="14_LUGLIO_15", date_formats=["%y_%B_%d"], expected_result=datetime(2014, 7, 15)),
])
def test_parse_date_using_format(self, date_string, date_formats, expected_result):
self.given_local_tz_offset(0)
self.given_parser()
self.when_date_string_is_parsed(date_string, date_formats)
self.then_date_was_parsed()
self.then_period_is('day')
self.then_parsed_datetime_is(expected_result)
@parameterized.expand([
param(date_string="2014/11/17 14:56 EDT", expected_result=datetime(2014, 11, 17, 18, 56)),
])
def test_parse_date_with_timezones_not_using_formats(self, date_string, expected_result):
self.given_local_tz_offset(0)
self.given_parser()
self.when_date_string_is_parsed(date_string)
self.then_date_was_parsed()
self.then_period_is('day')
self.then_parsed_datetime_is(expected_result)
@parameterized.expand([
param(date_string="2014/11/17 14:56 EDT",
date_formats=["%Y/%m/%d %H:%M EDT"],
expected_result=datetime(2014, 11, 17, 14, 56)),
])
def test_parse_date_with_timezones_using_formats_ignore_timezone(self, date_string, date_formats, expected_result):
self.given_local_tz_offset(0)
self.given_parser()
self.when_date_string_is_parsed(date_string, date_formats)
self.then_date_was_parsed()
self.then_period_is('day')
self.then_parsed_datetime_is(expected_result)
@parameterized.expand([
param(date_string="08-08-2014\xa018:29", expected_result=datetime(2014, 8, 8, 18, 29)),
])
def test_should_parse_with_no_break_space_in_dates(self, date_string, expected_result):
self.given_parser()
self.when_date_string_is_parsed(date_string)
self.then_date_was_parsed()
self.then_period_is('day')
self.then_parsed_datetime_is(expected_result)
def given_now(self, year, month, day, **time):
datetime_mock = Mock(wraps=datetime)
datetime_mock.utcnow = Mock(return_value=datetime(year, month, day, **time))
self.add_patch(
patch('dateparser.date_parser.datetime', new=datetime_mock)
)
def given_parser(self, restrict_to_languages=None, **params):
self.parser = date.DateDataParser(**params)
if restrict_to_languages is not None:
language_loader = LanguageDataLoader()
language_map = default_language_loader.get_language_map()
ordered_languages = OrderedDict([
(shortname, language_map[shortname])
for shortname in restrict_to_languages
])
language_loader._data = ordered_languages
self.add_patch(patch('dateparser.date.DateDataParser.language_loader', new=language_loader))
def given_local_tz_offset(self, offset):
self.add_patch(
patch.object(dateparser.timezone_parser,
'local_tz_offset',
new=timedelta(seconds=3600 * offset))
)
def when_date_string_is_parsed(self, date_string, date_formats=None):
self.result = self.parser.get_date_data(date_string, date_formats)
def when_multiple_dates_are_parsed(self, date_strings):
self.multiple_results = []
for date_string in date_strings:
try:
result = self.parser.get_date_data(date_string)
except Exception as error:
result = error
finally:
self.multiple_results.append(result)
def then_date_was_parsed(self):
self.assertIsNotNone(self.result['date_obj'])
def then_date_is_n_days_ago(self, days):
today = datetime.utcnow().date()
expected_date = today - timedelta(days=days)
self.assertEqual(expected_date, self.result['date_obj'].date())
def then_all_results_were_parsed(self):
self.assertNotIn(None, self.multiple_results)
def then_parsed_dates_are(self, expected_dates):
self.assertEqual(expected_dates, [result['date_obj'].date() for result in self.multiple_results])
def then_period_is(self, day):
self.assertEqual(day, self.result['period'])
def then_parsed_datetime_is(self, expected_datetime):
self.assertEqual(expected_datetime, self.result['date_obj'])
def then_parsed_date_is(self, expected_date):
self.assertEqual(expected_date, self.result['date_obj'].date())
def then_parsed_date_has_timezone(self):
self.assertTrue(hasattr(self.result['date_obj'], 'tzinfo'))
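# Illustrative sketch (not part of the original suite), summarising the
# behaviour exercised above: DateDataParser.get_date_data accepts freeform
# strings and, optionally, explicit strptime formats.
#   parser = date.DateDataParser()
#   parser.get_date_data('yesterday')['date_obj']
#   # -> roughly datetime.utcnow() minus one day
#   parser.get_date_data('14 giu 13', ['%y %B %d'])
#   # -> {'date_obj': datetime(2014, 6, 13, 0, 0), 'period': 'day', ...}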
class TestParserInitialization(BaseTestCase):
    UNKNOWN_LANGUAGES_EXCEPTION_RE = re.compile(r"Unknown language\(s\): (.+)")
def setUp(self):
super(TestParserInitialization, self).setUp()
self.result = NotImplemented
@parameterized.expand([
param(['ur', 'li'], unknown_languages=[u'ur', u'li']),
param(['ur', 'en'], unknown_languages=[u'ur']),
param(['pk'], unknown_languages=[u'pk']),
])
def test_should_raise_error_when_unknown_language_given(self, shortnames, unknown_languages):
self.when_parser_is_initialized(languages=shortnames)
self.then_languages_are_unknown(unknown_languages)
def when_parser_is_initialized(self, **params):
try:
self.parser = date.DateDataParser(**params)
except Exception as error:
self.error = error
def then_languages_are_unknown(self, unknown_languages):
self.assertIsInstance(self.error, ValueError)
match = self.UNKNOWN_LANGUAGES_EXCEPTION_RE.match(str(self.error))
self.assertTrue(match)
languages = match.group(1).split(", ")
six.assertCountEqual(self, languages, [repr(l) for l in unknown_languages])
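# Illustrative sketch (not part of the original suite): constructing the
# parser with unsupported language codes raises ValueError, e.g.
#   date.DateDataParser(languages=['pk'])
#   # -> ValueError: Unknown language(s): 'pk'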
class TestSanitizeDate(BaseTestCase):
def test_remove_year_in_russian(self):
self.assertEqual(date.sanitize_date(u'2005г.'), u'2005 ')
self.assertEqual(date.sanitize_date(u'2005 г.'), u'2005 ')
self.assertEqual(date.sanitize_date(u'Авг.'), u'Авг.')
if __name__ == '__main__':
unittest.main()
| 42.234615
| 119
| 0.641153
|
9cd2315800dcc6a3fe2ad2faf4bcc0aa1d01accc
| 6,657
|
py
|
Python
|
nengo/utils/matplotlib.py
|
HugoChateauLaurent/nengo
|
749893186ee09aa6c621a40da3ffd3878114db9c
|
[
"BSD-2-Clause"
] | null | null | null |
nengo/utils/matplotlib.py
|
HugoChateauLaurent/nengo
|
749893186ee09aa6c621a40da3ffd3878114db9c
|
[
"BSD-2-Clause"
] | null | null | null |
nengo/utils/matplotlib.py
|
HugoChateauLaurent/nengo
|
749893186ee09aa6c621a40da3ffd3878114db9c
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import absolute_import
import warnings
from distutils.version import LooseVersion
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from nengo.utils.compat import range
from nengo.utils.ensemble import tuning_curves
has_prop_cycle = LooseVersion(matplotlib.__version__) >= '1.5.0'
if has_prop_cycle:
    from cycler import cycler  # Dependency of MPL from 1.5.0 onward
def get_color_cycle():
if has_prop_cycle:
cycle = matplotlib.rcParams['axes.prop_cycle']
# Apparently the 'color' key may not exist, so have to fail gracefully
try:
return [prop['color'] for prop in cycle]
except KeyError:
pass # Fall back on deprecated axes.color_cycle
return matplotlib.rcParams['axes.color_cycle']
def set_color_cycle(colors, ax=None):
if has_prop_cycle:
if ax is None:
plt.rc('axes', prop_cycle=cycler('color', colors))
else:
ax.set_prop_cycle('color', colors)
else:
if ax is None:
plt.rc('axes', color_cycle=colors)
else:
ax.set_color_cycle(colors)
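# Hedged usage sketch (not part of the original module): the two helpers
# above paper over the Matplotlib 1.5 prop_cycle API change, e.g.
#   current = get_color_cycle()                     # list of colour specs
#   set_color_cycle(['r', 'g', 'b'])                # set globally via rcParams
#   set_color_cycle(['r', 'g', 'b'], ax=plt.gca())  # or per-axes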
def axis_size(ax=None):
"""Get axis width and height in pixels.
Based on a StackOverflow response:
https://stackoverflow.com/questions/19306510/
determine-matplotlib-axis-size-in-pixels
Parameters
----------
ax : axis object
The axes to determine the size of. Defaults to current axes.
Returns
-------
width : float
Width of axes in pixels.
height : float
Height of axes in pixels.
"""
ax = plt.gca() if ax is None else ax
fig = ax.figure
bbox = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
return bbox.width * fig.dpi, bbox.height * fig.dpi
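# Hedged usage sketch (not part of the original module):
#   width_px, height_px = axis_size()        # current axes
#   width_px, height_px = axis_size(ax=ax)   # a specific axes object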
def implot(plt, x, y, Z, ax=None, colorbar=True, **kwargs):
"""Image plot of general data (like imshow but with non-pixel axes).
Parameters
----------
plt : plot object
Plot object, typically `matplotlib.pyplot`.
x : (M,) array_like
Vector of x-axis points, must be linear (equally spaced).
y : (N,) array_like
Vector of y-axis points, must be linear (equally spaced).
Z : (M, N) array_like
Matrix of data to be displayed, the value at each (x, y) point.
ax : axis object (optional)
A specific axis to plot on (defaults to `plt.gca()`).
colorbar: boolean (optional)
Whether to plot a colorbar.
**kwargs
Additional arguments for `ax.imshow`.
"""
ax = plt.gca() if ax is None else ax
def is_linear(x):
diff = np.diff(x)
return np.allclose(diff, diff[0])
assert is_linear(x) and is_linear(y)
image = ax.imshow(Z, aspect='auto', extent=(x[0], x[-1], y[-1], y[0]),
**kwargs)
if colorbar:
plt.colorbar(image, ax=ax)
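# Hedged usage sketch (not part of the original module), assuming equally
# spaced axis vectors as the assertion above requires:
#   x = np.linspace(0, 1, 50)
#   y = np.linspace(0, 2, 50)
#   implot(plt, x, y, np.outer(np.sin(y), np.cos(x)), cmap='viridis')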
def rasterplot(time, spikes, ax=None, use_eventplot=False, **kwargs): # noqa
"""Generate a raster plot of the provided spike data.
Parameters
----------
time : array
Time data from the simulation
spikes : array
The spike data with columns for each neuron and 1s indicating spikes
ax : matplotlib.axes.Axes, optional (Default: None)
The figure axes to plot into. If None, we will use current axes.
use_eventplot : boolean, optional (Default: False)
Whether to use the new Matplotlib `eventplot` routine. It is slower
and makes larger image files, so we do not use it by default.
Returns
-------
ax : matplotlib.axes.Axes
The axes that were plotted into
Examples
--------
>>> import nengo
>>> with nengo.Network() as net:
... a = nengo.Ensemble(20, 1)
... p = nengo.Probe(a.neurons)
>>> with nengo.Simulator(net) as sim:
... sim.run(1)
>>> rasterplot(sim.trange(), sim.data[p])
"""
n_times, n_neurons = spikes.shape
if ax is None:
ax = plt.gca()
if use_eventplot and not hasattr(ax, 'eventplot'):
warnings.warn("Matplotlib version %s does not have 'eventplot'. "
"Falling back to non-eventplot version."
% matplotlib.__version__)
use_eventplot = False
colors = kwargs.pop('colors', None)
if colors is None:
color_cycle = get_color_cycle()
colors = [color_cycle[i % len(color_cycle)] for i in range(n_neurons)]
# --- plotting
if use_eventplot:
spiketimes = [time[s > 0].ravel() for s in spikes.T]
for ix in range(n_neurons):
if spiketimes[ix].size == 0:
spiketimes[ix] = np.array([-np.inf])
# hack to make 'eventplot' count from 1 instead of 0
spiketimes = [np.array([-np.inf])] + spiketimes
colors = [['k']] + colors
ax.eventplot(spiketimes, colors=colors, **kwargs)
else:
kwargs.setdefault('linestyle', 'None')
kwargs.setdefault('marker', '|')
# Default markersize determined by matching eventplot
ax_height = axis_size(ax)[1]
markersize = max(ax_height * 0.8 / n_neurons, 1)
# For 1 - 3 neurons, we need an extra fudge factor to match eventplot
markersize -= max(4 - n_neurons, 0) ** 2 * ax_height * 0.005
kwargs.setdefault('markersize', markersize)
kwargs.setdefault('markeredgewidth', 1)
for i in range(n_neurons):
spiketimes = time[spikes[:, i] > 0].ravel()
ax.plot(spiketimes, np.zeros_like(spiketimes) + (i + 1),
color=colors[i], **kwargs)
# --- set axes limits
if n_times > 1:
ax.set_xlim(time[0], time[-1])
ax.set_ylim(n_neurons + 0.6, 0.4)
if n_neurons < 5:
# make sure only integer ticks for small neuron numbers
ax.set_yticks(np.arange(1, n_neurons + 1))
# --- remove ticks as these are distracting in rasters
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('none')
return ax
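# Hedged usage sketch (not part of the original module): rasterplot also
# accepts a plain (timesteps, neurons) array in which 1s mark spikes, e.g.
#   t = np.arange(0, 1, 0.001)
#   spikes = (np.random.rand(len(t), 20) < 0.01).astype(float)
#   rasterplot(t, spikes)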
def plot_tuning_curves(ensemble, sim, connection=None, ax=None):
"""Plot tuning curves for the given ensemble and simulator.
If a connection is provided, the decoders will be used to set
the colours of the tuning curves.
"""
if ax is None:
ax = plt.gca()
evals, t_curves = tuning_curves(ensemble, sim)
if connection is not None:
if connection.dimensions > 1:
warnings.warn("Ignoring dimensions > 1 in plot_tuning_curves")
cm = plt.cm.ScalarMappable(cmap=plt.cm.coolwarm)
set_color_cycle(cm.to_rgba(sim.data[connection].decoders[0]), ax=ax)
ax.plot(evals, t_curves)
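# Hedged usage sketch (not part of the original module), with `ens`, `conn`
# and `sim` assumed to come from a built nengo model and simulator:
#   plot_tuning_curves(ens, sim)                   # plain tuning curves
#   plot_tuning_curves(ens, sim, connection=conn)  # coloured by decoders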
| 31.851675
| 78
| 0.62085
|