hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2a354ef0049096388bccc6314647ad7b75db7cfa | 3,711 | py | Python | lit_nlp/examples/datasets/classification.py | dearbornlavern/lit | 80f4f771a93ebd6c294f29d827f3993bc830958b | [
"Apache-2.0"
] | 2 | 2020-08-14T23:06:31.000Z | 2020-08-15T15:51:24.000Z | lit_nlp/examples/datasets/classification.py | joytianya/lit | 437df8d5907efe6c1ebb10ef2a161d2749aa3522 | [
"Apache-2.0"
] | 3 | 2022-02-13T20:17:36.000Z | 2022-02-27T10:16:12.000Z | lit_nlp/examples/datasets/classification.py | joytianya/lit | 437df8d5907efe6c1ebb10ef2a161d2749aa3522 | [
"Apache-2.0"
] | 1 | 2020-08-14T23:06:34.000Z | 2020-08-14T23:06:34.000Z | # Lint as: python3
"""Text classification datasets, including single- and two-sentence tasks."""
from lit_nlp.api import dataset as lit_dataset
from lit_nlp.api import types as lit_types
import pandas as pd
import tensorflow_datasets as tfds
class MNLIDataFromTSV(lit_dataset.Dataset):
  """MultiNLI dataset loaded from a GLUE-style TSV file.

  Unlike the TFDS variant, this version additionally exposes:
  - a binarized 'label2' field, using the same schema as HANS
  - the 'genre' field, enabling stratified analysis

  The trade-off is that you must download the data yourself from
  https://gluebenchmark.com/tasks and point this class at the .tsv file.
  """

  LABELS3 = ["contradiction", "entailment", "neutral"]
  LABELS2 = ["non-entailment", "entailment"]

  def binarize_label(self, label):
    # Collapse the 3-way NLI label into the 2-way HANS scheme.
    if label == "entailment":
      return "entailment"
    return "non-entailment"

  def __init__(self, path: str):
    with open(path) as fd:
      df = pd.read_csv(fd, sep="\t")
    records = []
    for _, row in df.iterrows():
      gold = row["gold_label"]
      records.append({
          "premise": row["sentence1"],
          "hypothesis": row["sentence2"],
          "label": gold,
          "label2": self.binarize_label(gold),
          "genre": row["genre"],
      })
    self._examples = records

  def spec(self) -> lit_types.Spec:
    """Should match MnliModel's input_spec()."""
    field_spec = {
        "premise": lit_types.TextSegment(),
        "hypothesis": lit_types.TextSegment(),
        # 'label' carries the 3-way NLI label, 'label2' the binarized one.
        "label": lit_types.CategoryLabel(vocab=self.LABELS3),
        "label2": lit_types.CategoryLabel(vocab=self.LABELS2),
        "genre": lit_types.CategoryLabel(),
    }
    return field_spec
class HansNLIData(lit_dataset.Dataset):
  """HANS NLI challenge set (https://arxiv.org/abs/1902.01007); 30k examples."""

  LABELS = ["non-entailment", "entailment"]

  def __init__(self, path: str):
    # Load the HANS evaluation TSV; gold labels are already binary.
    with open(path) as fd:
      df = pd.read_csv(fd, sep="\t", header=0)
    records = []
    for _, row in df.iterrows():
      records.append({
          "premise": row["sentence1"],
          "hypothesis": row["sentence2"],
          "label2": row["gold_label"],
          "heuristic": row["heuristic"],
          "template": row["template"],
      })
    self._examples = records

  def spec(self) -> lit_types.Spec:
    """Field spec; 'label2' carries the 2-way NLI label."""
    field_spec = {
        "premise": lit_types.TextSegment(),
        "hypothesis": lit_types.TextSegment(),
        "label2": lit_types.CategoryLabel(vocab=self.LABELS),
        "heuristic": lit_types.CategoryLabel(),
        "template": lit_types.CategoryLabel(),
    }
    return field_spec
class IMDBData(lit_dataset.Dataset):
  """IMDB reviews dataset; see http://ai.stanford.edu/~amaas/data/sentiment/."""

  LABELS = ["0", "1"]

  def __init__(self, split="test", max_seq_len=500):
    """Dataset constructor; loads the requested split into memory."""
    ds = tfds.load("imdb_reviews", split=split, download=True, try_gcs=True)
    self._examples = []
    for record in list(tfds.as_numpy(ds)):
      # Strip HTML line breaks, then keep only the LAST max_seq_len tokens.
      tokens = record["text"].decode("utf-8").replace("<br />", "").split()
      self._examples.append({
          "text": " ".join(tokens[-max_seq_len:]),
          "label": self.LABELS[record["label"]],
      })

  def spec(self) -> lit_types.Spec:
    """Dataset spec, which should match the model's input_spec()."""
    return {
        "text": lit_types.TextSegment(),
        "label": lit_types.CategoryLabel(vocab=self.LABELS),
    }
| 33.736364 | 80 | 0.63972 |
802e5c7ac671cbfea1b2fc5ff733016ecf9f6af4 | 177 | py | Python | monkeys-and-frogs-on-fire/triple_vision/networking/__init__.py | markjoshua12/game-jam-2020 | 846dd052d649a609ab7a52ac0f4dcbeb71781c3b | [
"MIT"
] | 15 | 2020-04-17T12:02:14.000Z | 2022-03-16T03:01:34.000Z | monkeys-and-frogs-on-fire/triple_vision/networking/__init__.py | markjoshua12/game-jam-2020 | 846dd052d649a609ab7a52ac0f4dcbeb71781c3b | [
"MIT"
] | 49 | 2020-04-18T21:14:57.000Z | 2022-01-13T03:05:09.000Z | monkeys-and-frogs-on-fire/triple_vision/networking/__init__.py | markjoshua12/game-jam-2020 | 846dd052d649a609ab7a52ac0f4dcbeb71781c3b | [
"MIT"
] | 55 | 2020-04-17T12:01:11.000Z | 2021-12-28T10:14:02.000Z | from triple_vision.networking.client import Client
from triple_vision.networking.utils import get_status
# Module-level singleton client shared by the whole package.
# NOTE(review): connect() runs as a side effect of importing this package —
# presumably intentional so the connection is ready before first use; confirm.
client = Client()
client.connect()
# Explicit public API of this package.
__all__ = ('client', 'get_status')
| 22.125 | 53 | 0.79661 |
a9cd0c061667b64f670a9b038b225e3ad0a95dfb | 2,887 | py | Python | pymatflow/scripts/chg-vasp.py | DeqiTang/pymatflow | bd8776feb40ecef0e6704ee898d9f42ded3b0186 | [
"MIT"
] | 6 | 2020-03-06T16:13:08.000Z | 2022-03-09T07:53:34.000Z | pymatflow/scripts/chg-vasp.py | DeqiTang/pymatflow | bd8776feb40ecef0e6704ee898d9f42ded3b0186 | [
"MIT"
] | 1 | 2021-10-02T02:23:08.000Z | 2021-11-08T13:29:37.000Z | pymatflow/scripts/chg-vasp.py | DeqiTang/pymatflow | bd8776feb40ecef0e6704ee898d9f42ded3b0186 | [
"MIT"
] | 1 | 2021-07-10T16:28:14.000Z | 2021-07-10T16:28:14.000Z | #!/usr/bin/env python
import argparse
from pymatflow.cmd.structflow import read_structure
from pymatflow.cmd.structflow import write_structure
def main():
    """Read a VASP *CHG* file and export charge-density images.

    Parses command-line arguments, loads the charge density with pymatflow's
    VaspCHG, writes the requested image kinds, and exports the structure
    contained in the CHG file.

    Output options (--output-option):
      1 -> grayscale image in the z direction with a scale bar
      2 -> 2D contour plot
      3 -> grayscale image without a scale bar (orthogonal cells only)
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input", type=str, required=True,
            help="input vasp *CHG* file")
    parser.add_argument("--output-structure", type=str, default="chg.cif",
            help="output stucture contained in PARCHG")
    parser.add_argument("-o", "--output", type=str, default="chg",
            help="prefix of the output image file name")
    parser.add_argument("--output-option", type=int, nargs="+",
            default=[1, 2, 3],
            help="choose to output images of many kinds! (1)->grayscale image in z direction with scale bar; (2)->2D contour plot; (3)->grayscale image without scale bar")
    parser.add_argument("--levels", type=int, default=10,
            help="levels of the color map or color bar")
    parser.add_argument("-z", "--z", type=float, default=1,
            help="a value between 0 and 1, indicat height of in z direction to print the plot")
    parser.add_argument("--cmap", type=str, default="gray",
            choices=["gray", "hot", "afmhot", "Spectral", "plasma", "magma", "hsv", "rainbow", "brg"])

    args = parser.parse_args()
    chg_filepath = args.input

    # Imported lazily so argument parsing (e.g. --help) stays fast.
    from pymatflow.charge.chg_vasp import VaspCHG

    vaspchg = VaspCHG()
    vaspchg.get_chg(chg_filepath)

    if 1 in args.output_option:
        # Grayscale image along z with a scale bar; may not work for
        # triclinic and monoclinic crystal systems.
        vaspchg.plot_grayscale_z(z=args.z, output_prefix=args.output)

    if 2 in args.output_option:
        # 2D contour plot of the density at fractional height z.
        vaspchg.plot_contour_2d(z=args.z, levels=args.levels, cmap=args.cmap, output_prefix=args.output)

    # PEP 8 (E712): test the flag's truthiness instead of '== True'.
    if 3 in args.output_option and vaspchg.is_orthogonal:
        # Grayscale image without a scale bar; only valid for
        # orthogonal crystal systems.
        vaspchg.plot_grayscale_orthogonal(z=args.z, output_prefix=args.output)

    write_structure(vaspchg.structure, filepath=args.output_structure)
if __name__ == "__main__":
main() | 40.661972 | 168 | 0.518878 |
8cec5abd03ef35b396baeef5b729ac3a33a5f954 | 78,915 | py | Python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_10_01/aio/operations/_resources_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 1 | 2021-09-07T18:35:49.000Z | 2021-09-07T18:35:49.000Z | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_10_01/aio/operations/_resources_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 4 | 2019-04-17T17:57:49.000Z | 2020-04-24T21:11:22.000Z | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2019_10_01/aio/operations/_resources_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 1 | 2019-04-05T18:17:43.000Z | 2019-04-05T18:17:43.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ResourcesOperations:
"""ResourcesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.resources.v2019_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        """Wire up the pipeline client, serializers, and client configuration."""
        # Pipeline client used to build and send HTTP requests.
        self._client = client
        # msrest (de)serializers for request bodies / response payloads.
        self._serialize = serializer
        self._deserialize = deserializer
        # Service client configuration (subscription id, polling interval, ...).
        self._config = config
def list_by_resource_group(
self,
resource_group_name: str,
filter: Optional[str] = None,
expand: Optional[str] = None,
top: Optional[int] = None,
**kwargs
) -> AsyncIterable["_models.ResourceListResult"]:
"""Get all the resources for a resource group.
:param resource_group_name: The resource group with the resources to get.
:type resource_group_name: str
:param filter: The filter to apply on the operation.:code:`<br>`:code:`<br>`The properties you
can use for eq (equals) or ne (not equals) are: location, resourceType, name, resourceGroup,
identity, identity/principalId, plan, plan/publisher, plan/product, plan/name, plan/version,
and plan/promotionCode.:code:`<br>`:code:`<br>`For example, to filter by a resource type, use:
$filter=resourceType eq 'Microsoft.Network/virtualNetworks':code:`<br>`:code:`<br>`You can use
substringof(value, property) in the filter. The properties you can use for substring are: name
and resourceGroup.:code:`<br>`:code:`<br>`For example, to get all resources with 'demo'
anywhere in the name, use: $filter=substringof('demo', name):code:`<br>`:code:`<br>`You can
link more than one substringof together by adding and/or operators.:code:`<br>`:code:`<br>`You
can filter by tag names and values. For example, to filter for a tag name and value, use
$filter=tagName eq 'tag1' and tagValue eq 'Value1'. When you filter by a tag name and value,
the tags for each resource are not returned in the results.:code:`<br>`:code:`<br>`You can use
some properties together when filtering. The combinations you can use are: substringof and/or
resourceType, plan and plan/publisher and plan/name, identity and identity/principalId.
:type filter: str
:param expand: Comma-separated list of additional properties to be included in the response.
Valid values include ``createdTime``\ , ``changedTime`` and ``provisioningState``. For example,
``$expand=createdTime,changedTime``.
:type expand: str
:param top: The number of results to return. If null is passed, returns all resources.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.resources.v2019_10_01.models.ResourceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ResourceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/resources'} # type: ignore
async def _move_resources_initial(
self,
source_resource_group_name: str,
parameters: "_models.ResourcesMoveInfo",
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._move_resources_initial.metadata['url'] # type: ignore
path_format_arguments = {
'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ResourcesMoveInfo')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_move_resources_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources'} # type: ignore
async def begin_move_resources(
self,
source_resource_group_name: str,
parameters: "_models.ResourcesMoveInfo",
**kwargs
) -> AsyncLROPoller[None]:
"""Moves resources from one resource group to another resource group.
The resources to move must be in the same source resource group. The target resource group may
be in a different subscription. When moving resources, both the source group and the target
group are locked for the duration of the operation. Write and delete operations are blocked on
the groups until the move completes.
:param source_resource_group_name: The name of the resource group containing the resources to
move.
:type source_resource_group_name: str
:param parameters: Parameters for moving resources.
:type parameters: ~azure.mgmt.resource.resources.v2019_10_01.models.ResourcesMoveInfo
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._move_resources_initial(
source_resource_group_name=source_resource_group_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_move_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/moveResources'} # type: ignore
async def _validate_move_resources_initial(
self,
source_resource_group_name: str,
parameters: "_models.ResourcesMoveInfo",
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._validate_move_resources_initial.metadata['url'] # type: ignore
path_format_arguments = {
'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ResourcesMoveInfo')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_validate_move_resources_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/validateMoveResources'} # type: ignore
async def begin_validate_move_resources(
self,
source_resource_group_name: str,
parameters: "_models.ResourcesMoveInfo",
**kwargs
) -> AsyncLROPoller[None]:
"""Validates whether resources can be moved from one resource group to another resource group.
This operation checks whether the specified resources can be moved to the target. The resources
to move must be in the same source resource group. The target resource group may be in a
different subscription. If validation succeeds, it returns HTTP response code 204 (no content).
If validation fails, it returns HTTP response code 409 (Conflict) with an error message.
Retrieve the URL in the Location header value to check the result of the long-running
operation.
:param source_resource_group_name: The name of the resource group containing the resources to
validate for move.
:type source_resource_group_name: str
:param parameters: Parameters for moving resources.
:type parameters: ~azure.mgmt.resource.resources.v2019_10_01.models.ResourcesMoveInfo
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._validate_move_resources_initial(
source_resource_group_name=source_resource_group_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'sourceResourceGroupName': self._serialize.url("source_resource_group_name", source_resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_validate_move_resources.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{sourceResourceGroupName}/validateMoveResources'} # type: ignore
def list(
self,
filter: Optional[str] = None,
expand: Optional[str] = None,
top: Optional[int] = None,
**kwargs
) -> AsyncIterable["_models.ResourceListResult"]:
"""Get all the resources in a subscription.
:param filter: The filter to apply on the operation.:code:`<br>`:code:`<br>`The properties you
can use for eq (equals) or ne (not equals) are: location, resourceType, name, resourceGroup,
identity, identity/principalId, plan, plan/publisher, plan/product, plan/name, plan/version,
and plan/promotionCode.:code:`<br>`:code:`<br>`For example, to filter by a resource type, use:
$filter=resourceType eq 'Microsoft.Network/virtualNetworks':code:`<br>`:code:`<br>`You can use
substringof(value, property) in the filter. The properties you can use for substring are: name
and resourceGroup.:code:`<br>`:code:`<br>`For example, to get all resources with 'demo'
anywhere in the name, use: $filter=substringof('demo', name):code:`<br>`:code:`<br>`You can
link more than one substringof together by adding and/or operators.:code:`<br>`:code:`<br>`You
can filter by tag names and values. For example, to filter for a tag name and value, use
$filter=tagName eq 'tag1' and tagValue eq 'Value1'. When you filter by a tag name and value,
the tags for each resource are not returned in the results.:code:`<br>`:code:`<br>`You can use
some properties together when filtering. The combinations you can use are: substringof and/or
resourceType, plan and plan/publisher and plan/name, identity and identity/principalId.
:type filter: str
:param expand: Comma-separated list of additional properties to be included in the response.
Valid values include ``createdTime``\ , ``changedTime`` and ``provisioningState``. For example,
``$expand=createdTime,changedTime``.
:type expand: str
:param top: The number of results to return. If null is passed, returns all resource groups.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.resources.v2019_10_01.models.ResourceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-10-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ResourceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resources'} # type: ignore
async def check_existence(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs
) -> bool:
    """Checks whether a resource exists.

    Issues a HEAD request against the resource URL; the service answers 204
    when the resource exists and 404 when it does not.

    :param resource_group_name: The name of the resource group containing the resource to check.
     The name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The resource provider of the resource to check.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type.
    :type resource_type: str
    :param resource_name: The name of the resource to check whether it exists.
    :type resource_name: str
    :param api_version: The API version to use for the operation.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: bool, or the result of cls(response)
    :rtype: bool
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Typed exceptions for auth / not-found / conflict; callers may extend via kwargs.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Expand the metadata URL template with the serialized path segments.
    fmt_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self.check_existence.metadata['url'], **fmt_args)  # type: ignore

    # Query string and headers for the HEAD probe.
    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.head(url, query_params, headers)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 204 and 404 are both expected answers; anything else is an error.
    if response.status_code not in [204, 404]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
    # True only for the 2xx (exists) case; 404 falls through to False.
    return 200 <= response.status_code <= 299
check_existence.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def _delete_initial(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs
) -> None:
    """Send the initial DELETE request for the long-running delete operation.

    Returns None (or ``cls(...)`` when a custom callback is supplied); the
    poller created by :meth:`begin_delete` tracks completion.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Build the resource URL from the metadata template.
    fmt_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self._delete_initial.metadata['url'], **fmt_args)  # type: ignore

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.delete(url, query_params, headers)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200/202/204 are all valid starts for an LRO delete.
    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def begin_delete(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs
) -> AsyncLROPoller[None]:
    """Deletes a resource.

    :param resource_group_name: The name of the resource group that contains the resource to
     delete. The name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type.
    :type resource_type: str
    :param resource_name: The name of the resource to delete.
    :type resource_name: str
    :param api_version: The API version to use for the operation.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Pop LRO-control kwargs first so they are not forwarded to the pipeline.
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fire the initial DELETE. cls=lambda x,y,z: x makes _delete_initial
        # hand back the raw pipeline response, which the poller needs to read
        # the LRO tracking headers.
        raw_result = await self._delete_initial(
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            api_version=api_version,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # These were consumed by the initial call; drop them so the polling
    # method does not receive stale per-request options.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)
    def get_long_running_output(pipeline_response):
        # DELETE produces no body: apply the user's cls hook if given,
        # otherwise the poller's final result is implicitly None.
        if cls:
            return cls(pipeline_response, None, {})
    # Serialized path args let the ARM poller re-derive polling URLs.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    # polling=True -> standard ARM polling; False -> single-shot; else a
    # caller-provided AsyncPollingMethod instance.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new LRO.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def _create_or_update_initial(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: "_models.GenericResource",
    **kwargs
) -> Optional["_models.GenericResource"]:
    """Send the initial PUT request for the long-running create-or-update.

    Returns the deserialized resource when the service responds 200/201,
    or None on 202 (accepted, still provisioning).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.GenericResource"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Build the resource URL from the metadata template.
    fmt_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self._create_or_update_initial.metadata['url'], **fmt_args)  # type: ignore

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the resource payload and issue the PUT.
    body_kwargs = {'content': self._serialize.body(parameters, 'GenericResource')}  # type: Dict[str, Any]
    request = self._client.put(url, query_params, headers, **body_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # 200 and 201 carry a resource body; 202 does not.
    deserialized = None
    if response.status_code in (200, 201):
        deserialized = self._deserialize('GenericResource', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def begin_create_or_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: "_models.GenericResource",
    **kwargs
) -> AsyncLROPoller["_models.GenericResource"]:
    """Creates a resource.

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to create.
    :type resource_type: str
    :param resource_name: The name of the resource to create.
    :type resource_name: str
    :param api_version: The API version to use for the operation.
    :type api_version: str
    :param parameters: Parameters for creating or updating the resource.
    :type parameters: ~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either GenericResource or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Pop LRO-control kwargs first so they are not forwarded to the pipeline.
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.GenericResource"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fire the initial PUT. cls=lambda x,y,z: x returns the raw pipeline
        # response so the poller can read the LRO tracking headers.
        raw_result = await self._create_or_update_initial(
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            api_version=api_version,
            parameters=parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Consumed by the initial call; drop so the polling method does not
    # receive stale per-request options.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)
    def get_long_running_output(pipeline_response):
        # Deserialize the final GET body into the model, then apply the
        # optional user cls hook.
        deserialized = self._deserialize('GenericResource', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    # Serialized path args let the ARM poller re-derive polling URLs.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    # polling=True -> standard ARM polling; False -> single-shot; else a
    # caller-provided AsyncPollingMethod instance.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new LRO.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def _update_initial(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: "_models.GenericResource",
    **kwargs
) -> Optional["_models.GenericResource"]:
    """Send the initial PATCH request for the long-running update.

    Returns the deserialized resource on 200, or None on 202 (accepted,
    still provisioning).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.GenericResource"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Build the resource URL from the metadata template.
    fmt_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self._update_initial.metadata['url'], **fmt_args)  # type: ignore

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the resource payload and issue the PATCH.
    body_kwargs = {'content': self._serialize.body(parameters, 'GenericResource')}  # type: Dict[str, Any]
    request = self._client.patch(url, query_params, headers, **body_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # Only 200 carries a resource body.
    deserialized = self._deserialize('GenericResource', pipeline_response) if response.status_code == 200 else None

    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def begin_update(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    parameters: "_models.GenericResource",
    **kwargs
) -> AsyncLROPoller["_models.GenericResource"]:
    """Updates a resource.

    :param resource_group_name: The name of the resource group for the resource. The name is case
     insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource to update.
    :type resource_type: str
    :param resource_name: The name of the resource to update.
    :type resource_name: str
    :param api_version: The API version to use for the operation.
    :type api_version: str
    :param parameters: Parameters for updating the resource.
    :type parameters: ~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either GenericResource or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Pop LRO-control kwargs first so they are not forwarded to the pipeline.
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.GenericResource"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fire the initial PATCH. cls=lambda x,y,z: x returns the raw
        # pipeline response so the poller can read the LRO tracking headers.
        raw_result = await self._update_initial(
            resource_group_name=resource_group_name,
            resource_provider_namespace=resource_provider_namespace,
            parent_resource_path=parent_resource_path,
            resource_type=resource_type,
            resource_name=resource_name,
            api_version=api_version,
            parameters=parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Consumed by the initial call; drop so the polling method does not
    # receive stale per-request options.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)
    def get_long_running_output(pipeline_response):
        # Deserialize the final response body, then apply the optional cls hook.
        deserialized = self._deserialize('GenericResource', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    # Serialized path args let the ARM poller re-derive polling URLs.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    # polling=True -> standard ARM polling; False -> single-shot; else a
    # caller-provided AsyncPollingMethod instance.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new LRO.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def get(
    self,
    resource_group_name: str,
    resource_provider_namespace: str,
    parent_resource_path: str,
    resource_type: str,
    resource_name: str,
    api_version: str,
    **kwargs
) -> "_models.GenericResource":
    """Gets a resource.

    :param resource_group_name: The name of the resource group containing the resource to get. The
     name is case insensitive.
    :type resource_group_name: str
    :param resource_provider_namespace: The namespace of the resource provider.
    :type resource_provider_namespace: str
    :param parent_resource_path: The parent resource identity.
    :type parent_resource_path: str
    :param resource_type: The resource type of the resource.
    :type resource_type: str
    :param resource_name: The name of the resource to get.
    :type resource_name: str
    :param api_version: The API version to use for the operation.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: GenericResource, or the result of cls(response)
    :rtype: ~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.GenericResource"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Build the resource URL from the metadata template.
    fmt_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
        'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self.get.metadata['url'], **fmt_args)  # type: ignore

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_params, headers)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 is a successful read.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('GenericResource', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}'}  # type: ignore
async def check_existence_by_id(
    self,
    resource_id: str,
    api_version: str,
    **kwargs
) -> bool:
    """Checks by ID whether a resource exists.

    Issues a HEAD request against the fully qualified resource ID; 204 means
    the resource exists, 404 means it does not.

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
    :type resource_id: str
    :param api_version: The API version to use for the operation.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: bool, or the result of cls(response)
    :rtype: bool
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # The resource ID is already a full path segment; skip_quote keeps its slashes.
    fmt_args = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    url = self._client.format_url(self.check_existence_by_id.metadata['url'], **fmt_args)  # type: ignore

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.head(url, query_params, headers)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 204 and 404 are both expected answers; anything else is an error.
    if response.status_code not in [204, 404]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
    # True only for the 2xx (exists) case; 404 falls through to False.
    return 200 <= response.status_code <= 299
check_existence_by_id.metadata = {'url': '/{resourceId}'}  # type: ignore
async def _delete_by_id_initial(
    self,
    resource_id: str,
    api_version: str,
    **kwargs
) -> None:
    """Send the initial DELETE for the long-running delete-by-ID operation.

    Returns None (or ``cls(...)`` when a custom callback is supplied); the
    poller created by :meth:`begin_delete_by_id` tracks completion.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # The resource ID is already a full path segment; skip_quote keeps its slashes.
    fmt_args = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    url = self._client.format_url(self._delete_by_id_initial.metadata['url'], **fmt_args)  # type: ignore

    query_params = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.delete(url, query_params, headers)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200/202/204 are all valid starts for an LRO delete.
    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
_delete_by_id_initial.metadata = {'url': '/{resourceId}'}  # type: ignore
async def begin_delete_by_id(
    self,
    resource_id: str,
    api_version: str,
    **kwargs
) -> AsyncLROPoller[None]:
    """Deletes a resource by ID.

    :param resource_id: The fully qualified ID of the resource, including the resource name and
     resource type. Use the format,
     /subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
    :type resource_id: str
    :param api_version: The API version to use for the operation.
    :type api_version: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Pop LRO-control kwargs first so they are not forwarded to the pipeline.
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fire the initial DELETE. cls=lambda x,y,z: x returns the raw
        # pipeline response so the poller can read the LRO tracking headers.
        raw_result = await self._delete_by_id_initial(
            resource_id=resource_id,
            api_version=api_version,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # Consumed by the initial call; drop so the polling method does not
    # receive stale per-request options.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)
    def get_long_running_output(pipeline_response):
        # DELETE produces no body: apply the user's cls hook if given,
        # otherwise the poller's final result is implicitly None.
        if cls:
            return cls(pipeline_response, None, {})
    # Serialized path args let the ARM poller re-derive polling URLs.
    path_format_arguments = {
        'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
    }
    # polling=True -> standard ARM polling; False -> single-shot; else a
    # caller-provided AsyncPollingMethod instance.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously saved poller instead of starting a new LRO.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_by_id.metadata = {'url': '/{resourceId}'}  # type: ignore
async def _create_or_update_by_id_initial(
self,
resource_id: str,
api_version: str,
parameters: "_models.GenericResource",
**kwargs
) -> Optional["_models.GenericResource"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GenericResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_by_id_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'GenericResource')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GenericResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('GenericResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_by_id_initial.metadata = {'url': '/{resourceId}'} # type: ignore
async def begin_create_or_update_by_id(
self,
resource_id: str,
api_version: str,
parameters: "_models.GenericResource",
**kwargs
) -> AsyncLROPoller["_models.GenericResource"]:
"""Create a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
:type resource_id: str
:param api_version: The API version to use for the operation.
:type api_version: str
:param parameters: Create or update resource parameters.
:type parameters: ~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GenericResource or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GenericResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_by_id_initial(
resource_id=resource_id,
api_version=api_version,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GenericResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update_by_id.metadata = {'url': '/{resourceId}'} # type: ignore
async def _update_by_id_initial(
self,
resource_id: str,
api_version: str,
parameters: "_models.GenericResource",
**kwargs
) -> Optional["_models.GenericResource"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GenericResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_by_id_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'GenericResource')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GenericResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_by_id_initial.metadata = {'url': '/{resourceId}'} # type: ignore
async def begin_update_by_id(
self,
resource_id: str,
api_version: str,
parameters: "_models.GenericResource",
**kwargs
) -> AsyncLROPoller["_models.GenericResource"]:
"""Updates a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
:type resource_id: str
:param api_version: The API version to use for the operation.
:type api_version: str
:param parameters: Update resource parameters.
:type parameters: ~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GenericResource or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GenericResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_by_id_initial(
resource_id=resource_id,
api_version=api_version,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GenericResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_by_id.metadata = {'url': '/{resourceId}'} # type: ignore
async def get_by_id(
self,
resource_id: str,
api_version: str,
**kwargs
) -> "_models.GenericResource":
"""Gets a resource by ID.
:param resource_id: The fully qualified ID of the resource, including the resource name and
resource type. Use the format,
/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}.
:type resource_id: str
:param api_version: The API version to use for the operation.
:type api_version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: GenericResource, or the result of cls(response)
:rtype: ~azure.mgmt.resource.resources.v2019_10_01.models.GenericResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.GenericResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.get_by_id.metadata['url'] # type: ignore
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('GenericResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_id.metadata = {'url': '/{resourceId}'} # type: ignore
| 51.645942 | 223 | 0.668263 |
2edde6dd9980e1a4f55f38d350b64b93c75b8811 | 6,229 | py | Python | thrift/compiler/test/fixtures/merge_from/gen-py/foo/ttypes.py | donsbot/fbthrift | 11e343118082583eb4326d51ff19c343c61ed3cb | [
"Apache-2.0"
] | null | null | null | thrift/compiler/test/fixtures/merge_from/gen-py/foo/ttypes.py | donsbot/fbthrift | 11e343118082583eb4326d51ff19c343c61ed3cb | [
"Apache-2.0"
] | null | null | null | thrift/compiler/test/fixtures/merge_from/gen-py/foo/ttypes.py | donsbot/fbthrift | 11e343118082583eb4326d51ff19c343c61ed3cb | [
"Apache-2.0"
] | null | null | null | #
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
# @generated
#
from __future__ import absolute_import
import sys
from thrift.util.Recursive import fix_spec
from thrift.Thrift import TType, TMessageType, TPriority, TRequestContext, TProcessorEventHandler, TServerInterface, TProcessor, TException, TApplicationException, UnimplementedTypedef
from thrift.protocol.TProtocol import TProtocolException
import pprint
import warnings
from thrift import Thrift
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TCompactProtocol
from thrift.protocol import THeaderProtocol
fastproto = None
try:
from thrift.protocol import fastproto
except ImportError:
pass
all_structs = []
UTF8STRINGS = bool(0) or sys.version_info.major >= 3
__all__ = ['UTF8STRINGS', 'Fields']
class Fields:
  """
  Attributes:
   - injected_field
  """

  # These are populated below, after the class body, from the generated
  # field table (thrift_spec) and the __init__/__setstate__ patch-ins.
  thrift_spec = None
  thrift_field_annotations = None
  thrift_struct_annotations = None
  __init__ = None
  @staticmethod
  def isUnion():
    # Fields is a plain thrift struct, not a union.
    return False

  def read(self, iprot):
    # Fast path: when the C accelerator (fastproto) is available and the
    # protocol/transport pair supports it, decode in native code and return.
    if (isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
      fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
      return
    if (isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastproto is not None:
      fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
      return
    # Slow path: walk the wire format field-by-field in pure Python.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 100:
        if ftype == TType.STRING:
          self.injected_field = iprot.readString().decode('utf-8') if UTF8STRINGS else iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it so old readers tolerate newer writers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path mirrors read(): use fastproto's native encoder when possible.
    if (isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
      oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
      return
    if (isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL)) and self.thrift_spec is not None and fastproto is not None:
      oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
      return
    # Slow path: emit the struct field-by-field; unset (None) fields are
    # simply omitted from the wire.
    oprot.writeStructBegin('Fields')
    if self.injected_field != None:
      oprot.writeFieldBegin('injected_field', TType.STRING, 100)
      oprot.writeString(self.injected_field.encode('utf-8')) if UTF8STRINGS and not isinstance(self.injected_field, bytes) else oprot.writeString(self.injected_field)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def __repr__(self):
    L = []
    padding = ' ' * 4
    if self.injected_field is not None:
      value = pprint.pformat(self.injected_field, indent=0)
      value = padding.join(value.splitlines(True))
      L.append('    injected_field=%s' % (value))
    return "%s(%s)" % (self.__class__.__name__, "\n" + ",\n".join(L) if L else '')

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)

  # Override the __hash__ function for Python3 - t10434117
  __hash__ = object.__hash__
all_structs.append(Fields)
# Thrift field ids index directly into thrift_spec, so slots 0-99 are
# placeholders; the single declared field lives at id 100.
Fields.thrift_spec = (None,) * 100 + (
    (100, TType.STRING, 'injected_field', True, None, 2, ),  # 100
)
Fields.thrift_struct_annotations = {}
Fields.thrift_field_annotations = {}

def Fields__init__(self, injected_field=None,):
    self.injected_field = injected_field

Fields.__init__ = Fields__init__

def Fields__setstate__(self, state):
    # Tolerate pickles created before 'injected_field' existed.
    state.setdefault('injected_field', None)
    self.__dict__ = state

Fields.__getstate__ = lambda self: self.__dict__.copy()
Fields.__setstate__ = Fields__setstate__

fix_spec(all_structs)
del all_structs
| 27.200873 | 339 | 0.692085 |
2cbf7aafcc915fd7c875a71d6c861587f3d29c4e | 2,337 | py | Python | main.py | Liquidibrium/http-server | 73abc39123d38ac2461155595b44733b1621078a | [
"MIT"
] | null | null | null | main.py | Liquidibrium/http-server | 73abc39123d38ac2461155595b44733b1621078a | [
"MIT"
] | null | null | null | main.py | Liquidibrium/http-server | 73abc39123d38ac2461155595b44733b1621078a | [
"MIT"
] | null | null | null | import socket
import threading
import traceback
import RequestHandler
import data
server_sockets = []
# server creates class insance HTTP Request Handler,
# which handles clinet request and sends apropriate response
# if error is occure client_socket will be closed
def handle_http_request(client_socket, address):
try:
handeler = RequestHandler.HTTPRequestHandler(client_socket, address)
while True:
message = client_socket.recv(data.CLIENT_MAX_MESSAGE_SIZE).decode()
if not message:
client_socket.close()
return
if not handeler.handle(message):
return
except Exception as exc:
print(f'exception happened : {exc}\nclosing client socket')
client_socket.close()
# accept connection with clients,
# handle messages from them in new thread
def get_clients(server_socket, address):
try:
while True:
client_socket, address = server_socket.accept()
thread = threading.Thread(
target=handle_http_request, args=(client_socket, address))
thread.start()
except Exception:
server_socket.close()
print('catched KeyboardInterrupt, closing server socket')
def open_sockets(server_address):
# TCP server socket
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.bind(server_address)
# number of parallel connections to wait for accept
server_socket.listen(data.PARALELL_CONN)
return server_socket
def close_sever_sockets():
for socket in server_sockets:
socket.close()
# start unique ip:port sockets
# in new thread to process clients requests
def start_severs():
ip_port_dict = data.get_unique_addresses()
for ip in ip_port_dict.keys():
for port in ip_port_dict[ip]:
server_address = (ip, port)
socket = open_sockets(server_address)
server_sockets.append(socket)
thread = threading.Thread(
target=get_clients, args=(socket, server_address))
thread.start()
print("All servers are ready ")
# starts main process
# servers are going to work forever unless Exceprion happens
try:
start_severs()
except Exception as exc:
traceback.print_exc()
close_sever_sockets()
| 31.581081 | 79 | 0.68635 |
a270ff067de794a5f16c7e0a4cb0a0134d25d7c6 | 2,278 | py | Python | test/tests/function_instancemethod.py | kevinxucs/pyston | bdb87c1706ac74a0d15d9bc2bae53798678a5f14 | [
"Apache-2.0"
] | 1 | 2015-11-06T03:39:51.000Z | 2015-11-06T03:39:51.000Z | test/tests/function_instancemethod.py | kevinxucs/pyston | bdb87c1706ac74a0d15d9bc2bae53798678a5f14 | [
"Apache-2.0"
] | null | null | null | test/tests/function_instancemethod.py | kevinxucs/pyston | bdb87c1706ac74a0d15d9bc2bae53798678a5f14 | [
"Apache-2.0"
] | null | null | null | # TODO test all of this with getclsattr
# TODO should make an ics test
class C(object):
def f():
pass
def g():
print 'running g'
print C.f == C.f
print C.f is C.f
print C().f == C().f
print C().f is C().f
#### Check the types of stuff
print type(C.f) # instancemethod
print type(C().f) # instancemethod
print type(g) # function
C.g = g
print type(C.g) # instancemethod
print type(C().g) # instancemethod
#### Assign a function to an instance
c = C()
c.g = g
print type(c.g) # function
c.g()
print c.g == c.g
print c.g is c.g
#### Assign a function to a class
def l(inst):
print 'running l', inst.i
C.l = l
print type(C.l) #instancemethod
print type(C().l) #instancemethod
c1 = C()
c1.i = 1
C.l(c1)
c1.l()
print c1.l == c1.l
print c1.l is c1.l
print C.l == C.l
print C.l is C.l
#### Assign a bound instancemethod to a class
C.k = c1.l # set C.k to a bound instancemethod
C.k() # this should call l with with c1 as the arg
c2 = C()
c2.i = 2
c2.k() # this should just call l with c1 as the arg, not try to bind anything else
print type(C.k) # instancemethod
print type(c2.k) # instancemethod
print c2.k == c2.k
print c2.k is c2.k
print C.k == C.k
print C.k is C.k
print C.k == c2.k
print C.k is c2.k
#### Assign an unbound instancemethod to a class
#### Getting is will bind it like a normal function
# TODO implement instancemethod stuff so this case works
"""
C.m = C.l
print type(C.m) #instancemethod
print type(C().m) #instancemethod
c3 = C()
c3.i = 3
C.m(c3)
c3.m()
print c3.m == c3.m
print c3.m is c3.m
print C.m == C.m
print C.m is C.m
"""
### Assign a bound instancemethod to an instance
c4 = C()
c4.i = 4
c4.z = c1.l
print type(c4.z) # instancemethod
c4.z() # should call l(c1)
print c4.z == c4.z
print c4.z is c4.z
### Assign an unbound instancemethod to an instance
c4 = C()
c4.i = 4
c4.z = C.l
print type(c4.z) # instancemethod
c4.z(c1) # should call l(c1)
print c4.z == c4.z
print c4.z is c4.z
### Call a bound instancemethod on its own (not through the callattr path)
bound_instancemethod = c1.l
bound_instancemethod()
print type(bound_instancemethod)
### Call an unbound instancemethod on its own (not through the callattr path)
unbound_instancemethod = C.l
unbound_instancemethod(c2)
print type(unbound_instancemethod)
| 18.983333 | 82 | 0.671642 |
aeac46a6a80b31750c80612641dca8e1a4337462 | 22,169 | py | Python | bokeh/document/events.py | charliezhan/bokeh | 5b8263cd5281494326d2fcd57745a69a1c2e6d93 | [
"BSD-3-Clause"
] | 1 | 2020-02-06T05:27:53.000Z | 2020-02-06T05:27:53.000Z | bokeh/document/events.py | charliezhan/bokeh | 5b8263cd5281494326d2fcd57745a69a1c2e6d93 | [
"BSD-3-Clause"
] | null | null | null | bokeh/document/events.py | charliezhan/bokeh | 5b8263cd5281494326d2fcd57745a69a1c2e6d93 | [
"BSD-3-Clause"
] | null | null | null | ''' Provide events that represent various changes to Bokeh Documents.
These events are used internally to signal changes to Documents. For
information about user-facing (e.g. UI or tool) events, see the reference
for :ref:`bokeh.events`.
'''
from __future__ import absolute_import
class DocumentChangedEvent(object):
''' Base class for all internal events representing a change to a
Bokeh Document.
'''
def __init__(self, document, setter=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
In the context of a Bokeh server application, incoming updates
to properties will be annotated with the session that is
doing the updating. This value is propagated through any
subsequent change notifications that the update triggers.
The session can compare the event setter to itself, and
suppress any updates that originate from itself.
'''
self.document = document
self.setter = setter
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._document_changed`` if it exists.
'''
if hasattr(receiver, '_document_changed'):
receiver._document_changed(self)
class DocumentPatchedEvent(DocumentChangedEvent):
''' A Base class for events that represent updating Bokeh Models and
their properties.
'''
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._document_patched`` if it exists.
'''
super(DocumentPatchedEvent, self).dispatch(receiver)
if hasattr(receiver, '_document_patched'):
receiver._document_patched(self)
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
*Sub-classes must implement this method.*
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
raise NotImplementedError()
class ModelChangedEvent(DocumentPatchedEvent):
''' A concrete event representing updating an attribute and value of a
specific Bokeh Model.
This is the "standard" way of updating most Bokeh model attributes. For
special casing situations that can optimized (e.g. streaming, etc.), a
``hint`` may be supplied that overrides normal mechanisms.
'''
def __init__(self, document, model, attr, old, new, serializable_new, hint=None, setter=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
model (Model) :
A Model to update
attr (str) :
The name of the attribute to update on the model.
old (object) :
The old value of the attribute
new (object) :
The new value of the attribute
serializable_new (object) :
A serialized (JSON) version of the new value. It may be
``None`` if a hint is supplied.
hint (DocumentPatchedEvent, optional) :
When appropriate, a secondary event may be supplied that
modifies the normal update process. For example, in order
to stream or patch data more efficiently than the standard
update mechanism.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
'''
if setter is None and isinstance(hint, (ColumnsStreamedEvent, ColumnsPatchedEvent)):
setter = hint.setter
super(ModelChangedEvent, self).__init__(document, setter)
self.model = model
self.attr = attr
self.old = old
self.new = new
self.serializable_new = serializable_new
self.hint = hint
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._document_model_dhanged`` if it
exists.
'''
super(ModelChangedEvent, self).dispatch(receiver)
if hasattr(receiver, '_document_model_changed'):
receiver._document_model_changed(self)
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
from ..model import collect_models
if self.hint is not None:
return self.hint.generate(references, buffers)
value = self.serializable_new
# the new value is an object that may have
# not-yet-in-the-remote-doc references, and may also
# itself not be in the remote doc yet. the remote may
# already have some of the references, but
# unfortunately we don't have an easy way to know
# unless we were to check BEFORE the attr gets changed
# (we need the old _all_models before setting the
# property). So we have to send all the references the
# remote could need, even though it could be inefficient.
# If it turns out we need to fix this we could probably
# do it by adding some complexity.
value_refs = set(collect_models(value))
# we know we don't want a whole new copy of the obj we're patching
# unless it's also the new value
if self.model != value:
value_refs.discard(self.model)
references.update(value_refs)
return { 'kind' : 'ModelChanged',
'model' : self.model.ref,
'attr' : self.attr,
'new' : value }
class ColumnDataChangedEvent(DocumentPatchedEvent):
    ''' A concrete event representing efficiently replacing *all*
    existing data for a :class:`~bokeh.models.sources.ColumnDataSource`

    '''

    def __init__(self, document, column_source, setter=None):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            column_source (ColumnDataSource) :
                The data source whose data is being replaced wholesale.

            setter (ClientSession or ServerSession or None, optional) :
                This is used to prevent "boomerang" updates to Bokeh apps.
                (default: None)
                See :class:`~bokeh.document.events.DocumentChangedEvent`
                for more details.

        '''
        super(ColumnDataChangedEvent, self).__init__(document, setter)
        self.column_source = column_source

    def dispatch(self, receiver):
        ''' Dispatch handling of this event to a receiver.
        This method will invoke ``receiver._column_data_changed`` if it exists.

        '''
        super(ColumnDataChangedEvent, self).dispatch(receiver)
        # BUG FIX: the attribute name previously contained a stray trailing
        # ')' ('_column_data_changed)'), so this hasattr test could never
        # succeed and _column_data_changed receivers were never notified.
        if hasattr(receiver, '_column_data_changed'):
            receiver._column_data_changed(self)

    def generate(self, references, buffers):
        ''' Create a JSON representation of this event suitable for sending
        to clients.

        .. code-block:: python

            {
                'kind'          : 'ColumnDataChanged'
                'column_source' : <reference to a CDS>
                'new'           : <new data to steam to column_source>
            }

        Args:
            references (dict[str, Model]) :
                If the event requires references to certain models in order to
                function, they may be collected here.
                **This is an "out" parameter**. The values it contains will be
                modified in-place.

            buffers (set) :
                If the event needs to supply any additional Bokeh protocol
                buffers, they may be added to this set.
                **This is an "out" parameter**. The values it contains will be
                modified in-place.

        '''
        from ..util.serialization import transform_column_source_data
        return { 'kind'          : 'ColumnDataChanged',
                 'column_source' : self.column_source.ref,
                 'new'           : transform_column_source_data(self.column_source.data) }
class ColumnsStreamedEvent(DocumentPatchedEvent):
    ''' Event describing new data being efficiently streamed (appended) to
    a :class:`~bokeh.models.sources.ColumnDataSource`

    '''

    def __init__(self, document, column_source, data, rollover, setter=None):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            column_source (ColumnDataSource) :
                The data source to stream new data to.

            data (dict) :
                The new column data to append.

            rollover (int) :
                A rollover limit; when a column grows past it, the oldest
                values are discarded to keep the column under the limit.

            setter (ClientSession or ServerSession or None, optional) :
                Identifies the session that originated this change, so
                "boomerang" updates can be suppressed. (default: None)
                See :class:`~bokeh.document.events.DocumentChangedEvent`
                for more details.

        '''
        super(ColumnsStreamedEvent, self).__init__(document, setter)
        self.column_source = column_source
        self.data = data
        self.rollover = rollover

    def dispatch(self, receiver):
        ''' Forward this event to ``receiver._columns_streamed`` (in
        addition to the base-class dispatches) when the receiver defines
        that handler.

        '''
        super(ColumnsStreamedEvent, self).dispatch(receiver)
        if hasattr(receiver, '_columns_streamed'):
            receiver._columns_streamed(self)

    def generate(self, references, buffers):
        ''' Create a JSON representation of this event suitable for sending
        to clients, of the form::

            {
                'kind'          : 'ColumnsStreamed'
                'column_source' : <reference to a CDS>
                'data'          : <new data to stream to column_source>
                'rollover'      : <rollover limit>
            }

        Args:
            references (dict[str, Model]) :
                Out-parameter collecting any model references the event
                needs; modified in place.

            buffers (set) :
                Out-parameter collecting any additional Bokeh protocol
                buffers; modified in place.

        '''
        return dict(
            kind='ColumnsStreamed',
            column_source=self.column_source.ref,
            data=self.data,
            rollover=self.rollover,
        )
class ColumnsPatchedEvent(DocumentPatchedEvent):
    ''' A concrete event representing efficiently applying data patches
    to a :class:`~bokeh.models.sources.ColumnDataSource`

    '''

    def __init__(self, document, column_source, patches, setter=None):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            column_source (ColumnDataSource) :
                The data source to apply patches to.

            patches (list) :
                The patch operations to apply.

            setter (ClientSession or ServerSession or None, optional) :
                Used to prevent "boomerang" updates to Bokeh apps.
                (default: None)

                See :class:`~bokeh.document.events.DocumentChangedEvent`
                for more details.

        '''
        super(ColumnsPatchedEvent, self).__init__(document, setter)
        self.column_source = column_source
        self.patches = patches

    def dispatch(self, receiver):
        ''' Dispatch handling of this event to *receiver*.

        Invokes ``receiver._columns_patched`` when the receiver defines it.

        '''
        super(ColumnsPatchedEvent, self).dispatch(receiver)
        handler = getattr(receiver, '_columns_patched', None)
        if handler is not None:
            handler(self)

    def generate(self, references, buffers):
        ''' Build the JSON-ready representation of this event:

        .. code-block:: python

            {
                'kind'          : 'ColumnsPatched'
                'column_source' : <reference to a CDS>
                'patches'       : <patches to apply to column_source>
            }

        ``references`` (dict[str, Model]) and ``buffers`` (set) are "out"
        parameters shared by all event generators; this event adds nothing
        to either.

        '''
        payload = {
            'kind'          : 'ColumnsPatched',
            'column_source' : self.column_source.ref,
            'patches'       : self.patches,
        }
        return payload
class TitleChangedEvent(DocumentPatchedEvent):
    ''' A concrete event representing a change to the title of a Bokeh
    Document.

    '''

    def __init__(self, document, title, setter=None):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            title (str) :
                The new title to set on the Document

            setter (ClientSession or ServerSession or None, optional) :
                Used to prevent "boomerang" updates to Bokeh apps.
                (default: None)

                See :class:`~bokeh.document.events.DocumentChangedEvent`
                for more details.

        '''
        super(TitleChangedEvent, self).__init__(document, setter)
        self.title = title

    def generate(self, references, buffers):
        ''' Build the JSON-ready representation of this event:

        .. code-block:: python

            {
                'kind'  : 'TitleChanged'
                'title' : <new title to set>
            }

        ``references`` (dict[str, Model]) and ``buffers`` (set) are "out"
        parameters shared by all event generators; this event adds nothing
        to either.

        '''
        payload = {
            'kind'  : 'TitleChanged',
            'title' : self.title,
        }
        return payload
class RootAddedEvent(DocumentPatchedEvent):
    ''' A concrete event representing a change to add a new Model to a
    Document's collection of "root" models.

    '''

    def __init__(self, document, model, setter=None):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            model (Model) :
                The Bokeh Model to add as a Document root.

            setter (ClientSession or ServerSession or None, optional) :
                Used to prevent "boomerang" updates to Bokeh apps.
                (default: None)

                See :class:`~bokeh.document.events.DocumentChangedEvent`
                for more details.

        '''
        super(RootAddedEvent, self).__init__(document, setter)
        self.model = model

    def generate(self, references, buffers):
        ''' Build the JSON-ready representation of this event:

        .. code-block:: python

            {
                'kind'  : 'RootAdded'
                'model' : <reference to a Model>
            }

        All models reachable from the new root are collected into
        ``references`` (an "out" parameter, modified in place) so clients
        can reconstruct the full object graph. ``buffers`` is unused here.

        '''
        references.update(self.model.references())
        payload = {
            'kind'  : 'RootAdded',
            'model' : self.model.ref,
        }
        return payload
class RootRemovedEvent(DocumentPatchedEvent):
    ''' A concrete event representing a change to remove an existing Model
    from a Document's collection of "root" models.

    '''

    def __init__(self, document, model, setter=None):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            model (Model) :
                The Bokeh Model to remove as a Document root.

            setter (ClientSession or ServerSession or None, optional) :
                Used to prevent "boomerang" updates to Bokeh apps.
                (default: None)

                See :class:`~bokeh.document.events.DocumentChangedEvent`
                for more details.

        '''
        super(RootRemovedEvent, self).__init__(document, setter)
        self.model = model

    def generate(self, references, buffers):
        ''' Build the JSON-ready representation of this event:

        .. code-block:: python

            {
                'kind'  : 'RootRemoved'
                'model' : <reference to a Model>
            }

        ``references`` (dict[str, Model]) and ``buffers`` (set) are "out"
        parameters shared by all event generators; this event adds nothing
        to either.

        '''
        payload = {
            'kind'  : 'RootRemoved',
            'model' : self.model.ref,
        }
        return payload
class SessionCallbackAdded(DocumentChangedEvent):
    ''' A concrete event representing a change to add a new callback (e.g.
    periodic, timeout, or "next tick") to a Document.

    '''

    def __init__(self, document, callback):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            callback (SessionCallback) :
                The callback to add

        '''
        super(SessionCallbackAdded, self).__init__(document)
        self.callback = callback

    def dispatch(self, receiver):
        ''' Dispatch handling of this event to *receiver*.

        Invokes ``receiver._session_callback_added`` when the receiver
        defines it.

        '''
        super(SessionCallbackAdded, self).dispatch(receiver)
        handler = getattr(receiver, '_session_callback_added', None)
        if handler is not None:
            handler(self)
class SessionCallbackRemoved(DocumentChangedEvent):
    ''' A concrete event representing a change to remove an existing callback
    (e.g. periodic, timeout, or "next tick") from a Document.

    '''

    def __init__(self, document, callback):
        '''
        Args:
            document (Document) :
                A Bokeh document that is to be updated.

            callback (SessionCallback) :
                The callback to remove

        '''
        super(SessionCallbackRemoved, self).__init__(document)
        self.callback = callback

    def dispatch(self, receiver):
        ''' Dispatch handling of this event to *receiver*.

        Invokes ``receiver._session_callback_removed`` when the receiver
        defines it.

        '''
        super(SessionCallbackRemoved, self).dispatch(receiver)
        handler = getattr(receiver, '_session_callback_removed', None)
        if handler is not None:
            handler(self)
| 33.387048 | 98 | 0.582706 |
88a0f91690258375a27829200682bdc2f2876182 | 1,660 | py | Python | stubs.min/Autodesk/Revit/DB/__init___parts/FamilySizeTableColumn.py | ricardyn/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | 1 | 2021-02-02T13:39:16.000Z | 2021-02-02T13:39:16.000Z | stubs.min/Autodesk/Revit/DB/__init___parts/FamilySizeTableColumn.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | stubs.min/Autodesk/Revit/DB/__init___parts/FamilySizeTableColumn.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | class FamilySizeTableColumn(object,IDisposable):
""" Contains column information for a FamilySizeTable. """
def Dispose(self):
""" Dispose(self: FamilySizeTableColumn) """
pass
def ReleaseUnmanagedResources(self,*args):
""" ReleaseUnmanagedResources(self: FamilySizeTableColumn,disposing: bool) """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
DisplayUnitType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The Display unit type of the column.
Get: DisplayUnitType(self: FamilySizeTableColumn) -> DisplayUnitType
"""
IsValidObject=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Specifies whether the .NET object represents a valid Revit entity.
Get: IsValidObject(self: FamilySizeTableColumn) -> bool
"""
Name=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The column name.
Get: Name(self: FamilySizeTableColumn) -> str
"""
UnitType=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""The Unit type of the column.
Get: UnitType(self: FamilySizeTableColumn) -> UnitType
"""
| 33.2 | 215 | 0.7 |
a9daa88b0487f5f83128befeadda27149a2cdd0b | 22,378 | py | Python | application/third_party/webplugin/wsgi/webplugin_example.py | ucielp/clusters_web | 40affe91f94200bfbac0fd9197474f9cbb648abf | [
"MIT"
] | null | null | null | application/third_party/webplugin/wsgi/webplugin_example.py | ucielp/clusters_web | 40affe91f94200bfbac0fd9197474f9cbb648abf | [
"MIT"
] | null | null | null | application/third_party/webplugin/wsgi/webplugin_example.py | ucielp/clusters_web | 40affe91f94200bfbac0fd9197474f9cbb648abf | [
"MIT"
] | null | null | null | import sys
import re
from string import strip
#sys.path.insert(0, "/home/services/software/ete3-webplugin/")
sys.path.insert(0, "/home/jhuerta/_Devel/ete/ete31-beta/")
from ete3 import WebTreeApplication # Required to use the webplugin
from ete3 import PhyloTree, TreeStyle, faces # Required by my custom
# application
# In order to extend the default WebTreeApplication, we define our own
# WSGI function that handles URL queries
def example_app(environ, start_response, queries):
    """Minimal WSGI handler extending the default WebTreeApplication.

    Routes on the first path component of ``PATH_INFO``:

      * ``/draw_tree`` -- requires a ``seqid`` query parameter and returns
        per-sequence HTML via ``html_seqid_info``.
      * anything else  -- returns a placeholder body.

    Args:
        environ: WSGI environment mapping (``PATH_INFO`` must be present).
        start_response: standard WSGI status/headers callable.
        queries: parsed query parameters, mapping name -> list of values.

    Returns:
        str: the response body.
    """
    asked_method = environ['PATH_INFO'].split("/")

    # Expected arguments from the URL (POST or GET method); a missing
    # parameter yields None.
    param_seqid = queries.get("seqid", [None])[0]

    # NOTE(review): the original code sent status '202 OK'; 202 normally
    # means "Accepted" -- kept as-is in case clients depend on it.
    start_response('202 OK', [('content-type', 'text/plain')])

    if asked_method[1] == "draw_tree":
        if param_seqid is None:
            return "Not enough params"
        # NOTE(review): html_seqid_info is not defined in this module --
        # presumably provided elsewhere; confirm before deploying.
        return html_seqid_info(param_seqid)
    else:
        return "Tachan!!"
# ==============================================================================
# TREE LOADER
#
# This is my tree loading functions. I want the WebTreeApplication to
# is this method to load the trees
# ==============================================================================
# Custom Tree loader
def extract_species_code(name):
    """Return the species code: the text after the last underscore in
    *name* (coerced to str), with surrounding whitespace removed."""
    tail = str(name).rsplit("_", 1)[-1]
    return tail.strip()
def my_tree_loader(tree):
    """ Tree-loading hook used by the WebTreeApplication: parse *tree* as a
    PhyloTree whose species codes are derived with extract_species_code. """
    return PhyloTree(tree, sp_naming_function=extract_species_code)
# ==============================================================================
# CUSTOM LAYOUTS
#
# This are my layout functions. I want the WebTreeApplication to use
# them for rendering trees
# ==============================================================================
LEAVE_FACES = [] # Global var that stores the faces that are rendered
                 # by the layout function. tree_renderer() (re)populates it
                 # from the "show_features" URL argument with
                 # [face, feature_key, column] triples, and main_layout()
                 # reads it for every leaf node.
def main_layout(node):
    ''' Main layout function. It controls what is shown in tree images:
    which faces each leaf carries (driven by the global LEAVE_FACES) and
    the per-node style derived from optional node features (hide, bsize,
    shape, bgcolor, fgcolor, evoltype). '''

    # Add faces to leaf nodes. This allows me to add the faces from
    # the global variable LEAVE_FACES, which is set by the application
    # controller according to the arguments passed through the URL.
    if node.is_leaf():

        for f, fkey, pos in LEAVE_FACES:
            # Only draw a face when the leaf actually carries the feature.
            if hasattr(node, fkey):
                faces.add_face_to_node(f, node, column=pos, position="branch-right")
    else:
        # Add special faces on collapsed nodes: hide descendants and show
        # a small grey summary with the number of collapsed leaves.
        if hasattr(node, "hide") and int(node.hide) == 1:
            node.img_style["draw_descendants"]= False
            collapsed_face = faces.TextFace(\
                " %s collapsed leaves." %len(node), \
                fsize=10, fgcolor="#444", ftype="Arial")
            faces.add_face_to_node(collapsed_face, node, 0)
        else:
            node.img_style["draw_descendants"] = True

    # Set node aspect. This controls which node features are used to
    # control the style of the tree. You can add or modify these
    # features, as well as their behaviour.
    if node.is_leaf():
        node.img_style["shape"] = "square"
        node.img_style["size"] = 4
    else:
        node.img_style["size"] = 8
        node.img_style["shape"] = "sphere"

    # Evoltype: [D]uplications, [S]peciations or [L]osses -- colour the
    # node and its connecting lines accordingly.
    if hasattr(node,"evoltype"):
        if node.evoltype == 'D':
            node.img_style["fgcolor"] = "#1d176e"
            node.img_style["hz_line_color"] = "#1d176e"
            node.img_style["vt_line_color"] = "#1d176e"
        elif node.evoltype == 'S':
            node.img_style["fgcolor"] = "#FF0000"
            node.img_style["line_color"] = "#FF0000"
        elif node.evoltype == 'L':
            node.img_style["fgcolor"] = "#777777"
            node.img_style["vt_line_color"] = "#777777"
            node.img_style["hz_line_color"] = "#777777"
            node.img_style["line_type"] = 1
    # If no evolutionary information, set a default (black) style
    else:
        node.img_style["fgcolor"] = "#000000"
        node.img_style["vt_line_color"] = "#000000"
        node.img_style["hz_line_color"] = "#000000"

    # Parse node features and convert them into styles. This must be done
    # like this, since current ete version does not allow modifying style
    # outside the layout function.
    if hasattr(node, "bsize"):
        node.img_style["size"]= int(node.bsize)

    if hasattr(node, "shape"):
        node.img_style["shape"]= node.shape

    if hasattr(node, "bgcolor"):
        node.img_style["bgcolor"]= node.bgcolor

    if hasattr(node, "fgcolor"):
        node.img_style["fgcolor"]= node.fgcolor
# ==============================================================================
# Checker function definitions:
#
# All checker actions must receive a node instance as unique argument
# and return True (node passes the filters) or False (node does not
# passes the filters).
#
# ==============================================================================
# Checker functions used when registering actions: each receives a node and
# returns True when the action should be offered for that node.
# (Converted from named lambdas to defs per PEP 8 / flake8 E731.)

def can_expand(node):
    """Return True for an internal node that is currently collapsed
    (its ``hide`` feature is set) and can therefore be expanded."""
    # Equality (not truthiness) kept deliberately: collapse() sets hide=1,
    # and 1 == True in Python.
    return not node.is_leaf() and (hasattr(node, "hide") and node.hide == True)

def can_collapse(node):
    """Return True for an internal node that is not hidden and can
    therefore be collapsed."""
    return not node.is_leaf() and (not hasattr(node, "hide") or node.hide == False)

def is_leaf(node):
    """Return True when *node* is a leaf."""
    return node.is_leaf()

def is_not_leaf(node):
    """Return True when *node* is an internal (non-leaf) node."""
    return not node.is_leaf()
# ==============================================================================
# Handler function definitions:
#
# All action handler functions must receive a node instance as unique
# argument. Returns are ignored.
#
# Note that there is a special action handler designed for searches
# within the tree. Handler receives node and searched term.
#
# ==============================================================================
def collapse(node):
    """Mark *node* as collapsed: flag it hidden and style it as a large
    grey sphere (main_layout stops drawing its descendants)."""
    for key, value in (("hide", 1),
                       ("bsize", 25),
                       ("shape", "sphere"),
                       ("fgcolor", "#bbbbbb")):
        node.add_feature(key, value)
def expand(node):
    """Undo collapse(): remove the hide/size/shape/colour features.

    A single try block is intentional: if any feature is missing the
    deletion stops there, exactly mirroring the collapse() marks.
    """
    try:
        for key in ("hide", "bsize", "shape", "fgcolor"):
            node.del_feature(key)
    except (KeyError, AttributeError):
        pass
def swap_branches(node):
    """Reverse the order of *node*'s immediate children in place."""
    node.children[:] = node.children[::-1]
def set_red(node):
    """Highlight *node* as a big red sphere."""
    for key, value in (("fgcolor", "#ff0000"),
                       ("bsize", 40),
                       ("shape", "sphere")):
        node.add_feature(key, value)
def set_bg(node):
    """Give *node* a pale-green background (rendered by main_layout)."""
    node.add_feature("bgcolor", "#CEDBC4")
def set_as_root(node):
    """Re-root the whole tree using *node* as the outgroup."""
    root = node.get_tree_root()
    root.set_outgroup(node)
def phylomedb_clean_layout(node):
    # Variant of the PhylomeDB layout with the node marker hidden (size 0).
    # NOTE(review): ``phylomedb_layout`` is not defined anywhere in this
    # module -- calling this function raises NameError unless it is provided
    # elsewhere; confirm before registering this layout.
    phylomedb_layout(node)
    node.img_style["size"]=0
def search_by_feature(tree, search_term):
    ''' Special search action bound to the web application.

    *search_term* has the form ``"<attribute>::<term>"``. Nodes whose
    *attribute* matches *term* (a case-insensitive regular expression) are
    highlighted with an orange sphere. The special term ``"clean::clean"``
    removes previous highlight marks instead. Returns None when *term* is
    empty; otherwise the tree is modified in place.
    '''
    # maxsplit=1 so a regex term may itself contain "::" (previously a
    # ValueError); single-field input still raises, as before.
    attr, term = search_term.split("::", 1)
    if not term:
        return None
    elif attr == "clean" and term == "clean":
        # Strip highlight marks; per-node try keeps the original
        # abort-on-first-missing-feature behaviour for each node.
        for n in tree.traverse():
            try:
                n.del_feature("bsize")
                n.del_feature("shape")
                n.del_feature("fgcolor")
            except (KeyError, AttributeError):
                pass
    else:
        # Compile once instead of re-compiling the pattern on every node.
        pattern = re.compile(term, re.IGNORECASE)
        for n in tree.traverse():
            if hasattr(n, attr) and pattern.search(str(getattr(n, attr))):
                n.add_feature("bsize", 16)
                n.add_feature("shape", "sphere")
                n.add_feature("fgcolor", "#BB8C2B")
# ==============================================================================
# HTML generators
#
# Actions will be automatically added to the popup menus and attached
# to the action handler function. However, if just want to add
# informative items to the popup menu or external actions not
# associated to any handler, you can overwrite the default html
# generator of each action.
#
# html generators receive all information attached to the node and
# action in 5 arguments:
#
# * aindex: index of the action associated to this html generator
#
# * nodeid: id of the node to which action is attached
#
# * treeid: id of the tree in which node is present
#
# * text: the text string associated to the element that raised the
# action (only applicable to text faces actions)
#
# * node: node instance in which action will be executed.
#
#
# Html generator should return a text string encoding a html list
# item:
#
# Example: return "<li> my text </li>"
#
# ==============================================================================
def branch_info(aindex, nodeid, treeid, text, node):
    ''' Popup-menu header item showing the node's branch length and
    support value. '''
    template = """
        <li style="background:#eee; font-size:8pt;">
        <div style="text-align:left;font-weight:bold;">
        NODE ACTIONS
        </div>
        (<b>Branch: </b>%0.3f <b>Support:</b> %0.3f)<br>
        </li>"""
    return template % (node.dist, node.support)
def search_in_ensmbl(aindex, nodeid, treeid, text, node):
    ''' Popup-menu entry linking the node's name to an Ensembl search.
    (Name keeps the historical "ensmbl" spelling used by callers.) '''
    gene = node.name
    return '''<li>
          <a target="_blank" href="http://www.ensembl.org/common/Search/Results?species=all;idx=;q=%s">
          <img src=""> Search in ensembl: %s >
          </a>
        </li> ''' % (gene, gene)
def external_links_divider(aindex, nodeid, treeid, text, node):
    ''' Popup-menu separator shown above the external links (leaf nodes
    only; internal nodes get no divider). '''
    if not node.is_leaf():
        return ""
    return """<li
        style="background:#eee;font-size:8pt;"><b>External
        links</b></li>"""
def topology_action_divider(aindex, nodeid, treeid, text, node):
    ''' Popup-menu separator shown above the tree-manipulation actions. '''
    header = """<li style="background:#eee;"><b>Tree node actions</b></li>"""
    return header
# ==============================================================================
# TREE RENDERER
#
# By default, ETE will render the tree as a png image and will return
# a simplistic HTML code to show the image and interact with
# it. However, it is possible to wrap such functionality to preprocess
# trees in a particular way, read extra parameters from the URL query
# and/or produce enriched HTML applications.
#
# Tree renderer wrappers receive the tree object, its id, and the WSGI
# application object. They MUST call the
# application._get_tree_img(tree) method and return the a HTML
# string.
#
# A simplistic wrapper that emulates the default WebTreeApplication
# behaviour would be:
#
# def tree_renderer(tree, treeid, application):
# html = application._get_tree_img(treeid = treeid)
# return html
#
# ==============================================================================
def tree_renderer(tree, treeid, application):
    """Custom tree renderer wrapper.

    Reads the ``show_features`` URL argument, builds one text face per
    requested leaf feature (populating the global LEAVE_FACES used by
    main_layout), renders the tree image via application._get_tree_img(),
    and wraps it in an HTML panel with a feature selector, a search box and
    a toolbar.

    NOTE(review): this function uses Python 2-only syntax
    (``print >>sys.stderr``, ``dict.iteritems``) -- it will not run on
    Python 3 without porting.
    """
    # The following part controls the features that are attached to
    # leaf nodes and that will be shown in the tree image. Node styles
    # are set here, and faces are also created. The idea is the
    # following: user can pass feature names using the URL argument
    # "tree_features". If the feature is handled by our function and
    # it is available in nodes, a face will be created and added to
    # the global variable LEAVE_FACES. Remember that our layout
    # function uses such variable to add faces to nodes during
    # rendering.

    # Extracts from URL query the features that must be drawn in the tree
    asked_features = application.queries.get("show_features", ["name"])[0].split(",")
    print >>sys.stderr, asked_features
    # NOTE(review): this helper is defined but never called below; the
    # availability counters are filled by the explicit loop further down.
    def update_features_avail(feature_key, name, col, fsize, fcolor, prefix, suffix):
        text_features_avail.setdefault(feature_key, [name, 0, col, fsize, fcolor, prefix, suffix])
        text_features_avail[feature_key][1] += 1

    # Style the root node itself (purple sphere, zero-length branch).
    tree.add_feature("fgcolor", "#833DB4")
    tree.add_feature("shape", "sphere")
    tree.add_feature("bsize", "8")
    tree.dist = 0

    # These are the features that I want to convert into image
    # faces. I use an automatic function to do this. Each element in
    # the dictionary is a list that contains the information about how
    # to create a textFace with the feature.
    leaves = tree.get_leaves()
    formated_features =  {
        # feature_name: ["Description", face column position, text_size, color, text_prefix, text_suffix ]
        "name": ["Leaf name", len(leaves), 0, 12, "#000000", "", ""],
        "spname": ["Species name", len(leaves), 1, 12, "#f00000", " Species:", ""],
        }

    # populates the global LEAVE_FACES variable
    global LEAVE_FACES
    LEAVE_FACES = []
    unknown_faces_pos = 2
    for fkey in asked_features:
        if fkey in formated_features:
            name, total, pos, size, color, prefix, suffix = formated_features[fkey]
            f = faces.AttrFace(fkey, ftype="Arial", fsize=size, fgcolor=color, text_prefix=prefix, text_suffix=suffix)
            LEAVE_FACES.append([f, fkey, pos])
        else:
            # If the feature has no associated format, let's add something standard
            prefix = " %s: " %fkey
            f = faces.AttrFace(fkey, ftype="Arial", fsize=10, fgcolor="#666666", text_prefix=prefix, text_suffix="")
            LEAVE_FACES.append([f, fkey, unknown_faces_pos])
            unknown_faces_pos += 1

    # Count, for every feature present in any leaf, how many leaves carry
    # it -- this drives the "Available tree features" checkbox menu.
    text_features_avail = {}
    for l in leaves:
        for f in l.features:
            if not f.startswith("_"):
                text_features_avail.setdefault(f, 0)
                text_features_avail[f] = text_features_avail[f] + 1

    html_features = """
      <div id="tree_features_box">
      <div class="tree_box_header">Available tree features
      <img src="/webplugin/close.png" onclick='$(this).closest("#tree_features_box").hide();'>
      </div>
      <form action='javascript: set_tree_features("", "", "");'>
      """

    for fkey, counter in text_features_avail.iteritems():
        if fkey in asked_features:
            tag = "CHECKED"
        else:
            tag = ""

        fname = formated_features.get(fkey, [fkey])[0]

        #html_features = "<tr>"
        html_features += '<INPUT NAME="tree_feature_selector" TYPE=CHECKBOX %s VALUE="%s">%s (%s/%s leaves)</input><br> ' %\
            (tag, fkey, fname, counter, len(leaves))
        # html_features += '<td><INPUT size=7 type="text"></td> <td><input size=7 type="text"></td> <td><input size=7 type="text"></td> <td><input size=1 type="text"></td><br>'
        #html_features += "</tr>"

    html_features += """<input type="submit" value="Refresh"
                       onclick='javascript:
                                // This piece of js code extracts the checked features from menu and redraw the tree sending such information
                                var allVals = [];
                                $(this).parent().children("input[name=tree_feature_selector]").each(function(){
                                if ($(this).is(":checked")){
                                    allVals.push($(this).val());
                                }});
                                draw_tree("%s", "", "#img1", {"show_features": allVals.join(",")} );'
                       >
                       </form></div>""" %(treeid)

    # Toolbar buttons: feature selector, image download, search, clear-search.
    features_button = """
     <li><a href="#" onclick='show_box(event, $(this).closest("#tree_panel").children("#tree_features_box"));'>
     <img width=16 height=16 src="/webplugin/icon_tools.png" alt="Select Tree features">
     </a></li>"""

    download_button = """
     <li><a href="/webplugin/tmp/%s.png" target="_blank">
     <img width=16 height=16 src="/webplugin/icon_attachment.png" alt="Download tree image">
     </a></li>""" %(treeid)

    search_button = """
      <li><a href="#" onclick='javascript:
          var box = $(this).closest("#tree_panel").children("#search_in_tree_box");
          show_box(event, box); '>
      <img width=16 height=16 src="/webplugin/icon_search.png" alt="Search in tree">
      </a></li>"""

    clean_search_button = """
      <li><a href="#" onclick='run_action("%s", "", %s, "clean::clean");'>
      <img width=16 height=16 src="/webplugin/icon_cancel_search.png" alt="Clear search results">
      </a></li>""" %\
        (treeid, 0)

    buttons = '<div id="ete_tree_buttons">' +\
        features_button + search_button + clean_search_button + download_button +\
        '</div>'

    # Drop-down listing every searchable feature for the search form.
    search_select = '<select id="ete_search_target">'
    for fkey in text_features_avail:
        search_select += '<option value="%s">%s</option>' %(fkey,fkey)
    search_select += '</select>'

    search_form = """
     <div id="search_in_tree_box">
     <div class="tree_box_header"> Search in Tree
     <img src="/webplugin/close.png" onclick='$(this).closest("#search_in_tree_box").hide();'>
     </div>
     <form onsubmit='javascript:
                     search_in_tree("%s", "%s",
                                    $(this).closest("form").children("#ete_search_term").val(),
                                    $(this).closest("form").children("#ete_search_target").val());'
          action="javascript:void(0);">
     %s
     <input id="ete_search_term" type="text" value=""'></input>
     <br><i>(Searches are not case sensitive and accept Perl regular expressions)</i>
     <br>
     </form>
     <i> (Press ENTER to initiate the search)</i>
     </div>
     """ %\
        (treeid, 0, search_select) # 0 is the action index associated
                                   # to the search functionality. This
                                   # means that this action is the
                                   # first to be registered in WebApplication.

    tree_panel_html = '<div id="tree_panel">' + search_form + html_features + buttons + '</div>'

    # Now we render the tree into image and get the HTML that handles it
    tree_html = application._get_tree_img(treeid = treeid)

    # Let's return enriched HTML
    return tree_panel_html + tree_html
# ==============================================================================
#
# Main WSGI Application
#
# ==============================================================================
# Create a basic ETE WebTreeApplication
# Create a basic ETE WebTreeApplication
application = WebTreeApplication()

# Set your temporal dir to allow web users to generate files. These two
# paths should point to the same place, one using the absolute path in
# your system, and the other the URL to access the same
# directory. Note that the referred directory must be writable by the
# webserver.
#application.CONFIG["temp_dir"] = "/home/services/web/etetoolkit.org/webplugin/tmp/"
application.CONFIG["temp_dir"] = "/var/www/etetoolkit.org/webplugin/tmp/"
application.CONFIG["temp_url"] = "/webplugin/tmp/" # Relative to web site Document Root

# Set the DISPLAY port that ETE should use to draw pictures. You will
# need an X server installed on your server and must allow the webserver
# user to access the display. If the X server is started by a different
# user and www-data (usually the apache user) cannot access the display,
# try modifying DISPLAY permissions by executing "xhost +"
application.CONFIG["DISPLAY"] = ":0" # This is the most common
                                     # configuration

# We extend the minimum WebTreeApplication with our own WSGI
# application
application.set_external_app_handler(example_app)

# Lets now apply our custom tree loader function to the main
# application
application.set_tree_loader(my_tree_loader)

# And our layout as the default one to render trees: rectangular mode,
# leaf names drawn by main_layout (not by the built-in leaf-name face).
ts = TreeStyle()
ts.show_leaf_name = False
ts.layout_fn.append(main_layout)
ts.mode = "r"
ts.branch_vertical_margin = 5
#ts.scale = 20
application.set_tree_style(ts)
#application.set_default_layout_fn(main_layout)
application.set_tree_size(None, None)

# I want to make up how the tree image is shown using a custom tree
# renderer that adds much more HTML code
application.set_external_tree_renderer(tree_renderer)
# ==============================================================================
# ADD CUSTOM ACTIONS TO THE APPLICATION
#
# The function "register_action" allows to attach functionality to
# nodes in the image. All registered accions will be shown in the
# popup menu bound to the nodes and faces in the web image.
#
#
# register_action(action_name, target_type=["node"|"face"|"layout"|"search"], \
# action_handler, action_checker, html_generator_handler)
#
# When the Application is executed it will read your registered
# acctions and will do the following:
#
# 1. Load the tree and get the image map
#
# 2. For each node and face in the tree, it will browse all registered
# actions and will run the action_checker function to determine if the
# action must be activated for such node or face
#
# 3. If action_checker(node) returns True, the action will be attached
# to the context menu of that specific node or face, otherwise it will
# be hidden.
#
# 4. When a click is done on a specific node, popup menus will be
# built using their active actions. For this, ETE will use the
# html_generator function associated to each function if
# available. Otherwise, a popup entry will be added automatically.
#
# 5. When a certain action is pressed in the popup menus, the
# action_handler function attached to the action will be executed over
# its corresponding node, and the tree image will be refreshed.
#
# Special values:
#
# action_checker = None : It will be interpreted as "Show allways"
# html_generator = None : Autogenerate html and link to action
# action_handler = None : Action will be ignored
#
# ==============================================================================
# We first register the special action "search", which is attached to
# our custom search function (handlers receive (tree, search_term)).
application.register_action("", "search", search_by_feature, None, None)

# Node manipulation options (bound to node items and all their faces).
# Signature: register_action(name, target_type, handler, checker, html_generator)
application.register_action("branch_info", "node", None, None, branch_info)
application.register_action("<b>Collapse</b>", "node", collapse, can_collapse, None)
application.register_action("Expand", "node", expand, can_expand, None)
application.register_action("Highlight background", "node", set_bg, None, None)
application.register_action("Set as root", "node", set_as_root, None, None)
application.register_action("Swap children", "node", swap_branches, is_not_leaf, None)
application.register_action("Pay me a compliment", "face", set_red, None, None)

# Actions attached to node's content (shown as text faces)
application.register_action("divider", "face", None, None, external_links_divider)

# Alternative layouts selectable from the web UI (both map to main_layout).
application.register_action("Default layout", "layout", main_layout, None, None)
application.register_action("Clean layout", "layout", main_layout, None, None)
| 40.103943 | 176 | 0.616186 |
07d5550ef52c653f84cf878720a7ac5016c42b06 | 368 | py | Python | .history/ClassFiles/DataStructures/Set_20210101141417.py | minefarmer/Comprehensive-Python | f97b9b83ec328fc4e4815607e6a65de90bb8de66 | [
"Unlicense"
] | null | null | null | .history/ClassFiles/DataStructures/Set_20210101141417.py | minefarmer/Comprehensive-Python | f97b9b83ec328fc4e4815607e6a65de90bb8de66 | [
"Unlicense"
] | null | null | null | .history/ClassFiles/DataStructures/Set_20210101141417.py | minefarmer/Comprehensive-Python | f97b9b83ec328fc4e4815607e6a65de90bb8de66 | [
"Unlicense"
] | null | null | null | ''' SETS
A set is a collection of values.
Values in a set are not ordered.
Values in a set are not indexed.
How to create a Set with a comstructor() # 18
'''
fruits = {"grapes", "apples", "berries"}
for x in fruits:
print(x) # apples
# grapes
# berries
animals = set(("lion", "tiger","bear")) # using a constructor
| 19.368421 | 62 | 0.581522 |
12d4c3cfe8a4d1b66778ca542339d91cb3a1cfb8 | 2,262 | py | Python | e-learning/educa/students/views.py | EssaAlshammri/django-by-example | d1a1cba9308d4f19bbb1228dbd191ad5540b2c78 | [
"MIT"
] | 3 | 2017-04-25T10:19:02.000Z | 2017-06-07T12:50:30.000Z | e-learning/educa/students/views.py | EssaAlshammri/django-by-example | d1a1cba9308d4f19bbb1228dbd191ad5540b2c78 | [
"MIT"
] | null | null | null | e-learning/educa/students/views.py | EssaAlshammri/django-by-example | d1a1cba9308d4f19bbb1228dbd191ad5540b2c78 | [
"MIT"
] | null | null | null | from django.contrib.auth import authenticate, login
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, FormView
from django.views.generic.list import ListView
from courses.models import Course
from .forms import CourseEnrollForm
class StudentRegistrationView(CreateView):
    """Sign up a new student account and log the new user in immediately."""

    template_name = 'students/student/registration.html'
    form_class = UserCreationForm
    success_url = reverse_lazy('student_course_list')

    def form_valid(self, form):
        # Let CreateView persist the user first, then authenticate with the
        # freshly submitted credentials and open a session for them.
        response = super(StudentRegistrationView, self).form_valid(form)
        data = form.cleaned_data
        new_user = authenticate(
            username=data['username'], password=data['password1']
        )
        login(self.request, new_user)
        return response
class StudentEnrollCourseView(LoginRequiredMixin, FormView):
    """Enroll the logged-in student on the course posted in the form."""

    course = None
    form_class = CourseEnrollForm

    def form_valid(self, form):
        # Remember the course so get_success_url can redirect into it.
        enrolled = form.cleaned_data['course']
        self.course = enrolled
        enrolled.students.add(self.request.user)
        return super(StudentEnrollCourseView, self).form_valid(form)

    def get_success_url(self):
        # Send the student straight to the course they just joined.
        return reverse_lazy('student_course_detail', args=[self.course.id])
class StudentCourseListView(LoginRequiredMixin, ListView):
    """List only the courses the current user is enrolled on."""

    model = Course
    template_name = 'students/course/list.html'

    def get_queryset(self):
        base_qs = super(StudentCourseListView, self).get_queryset()
        # Restrict to courses whose students include the requesting user.
        return base_qs.filter(students__in=[self.request.user])
class StudentCourseDetailView(DetailView):
    """Course detail page; only shows courses the user is enrolled on."""

    model = Course
    template_name = 'students/course/detail.html'

    def get_queryset(self):
        # Restrict access to courses the requesting user is enrolled on.
        qs = super(StudentCourseDetailView, self).get_queryset()
        return qs.filter(students__in=[self.request.user])

    def get_context_data(self, **kwargs):
        # Expose the requested module (or the first module by default) to
        # the template alongside the standard DetailView context.
        context = super(
            StudentCourseDetailView, self).get_context_data(**kwargs)
        course = self.get_object()
        if 'module_id' in self.kwargs:
            context['module'] = course.modules.get(id=self.kwargs['module_id'])
        else:
            # NOTE(review): raises IndexError when a course has no modules —
            # confirm every course is guaranteed at least one module.
            context['module'] = course.modules.all()[0]
        return context
41da5c0267b07eb97a622001272301fe1dad7c06 | 12,022 | py | Python | tests/components/vizio/test_config_flow.py | shexbeer/home-assistant | 883b8f21ce74120c28890bde64f1175723be11c7 | [
"Apache-2.0"
] | 1 | 2021-03-17T10:03:42.000Z | 2021-03-17T10:03:42.000Z | tests/components/vizio/test_config_flow.py | shexbeer/home-assistant | 883b8f21ce74120c28890bde64f1175723be11c7 | [
"Apache-2.0"
] | null | null | null | tests/components/vizio/test_config_flow.py | shexbeer/home-assistant | 883b8f21ce74120c28890bde64f1175723be11c7 | [
"Apache-2.0"
] | null | null | null | """Tests for Vizio config flow."""
import pytest
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.media_player import DEVICE_CLASS_SPEAKER, DEVICE_CLASS_TV
from homeassistant.components.vizio.const import (
CONF_VOLUME_STEP,
DEFAULT_NAME,
DEFAULT_VOLUME_STEP,
DOMAIN,
VIZIO_SCHEMA,
)
from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_DEVICE_CLASS,
CONF_HOST,
CONF_NAME,
)
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
ACCESS_TOKEN,
HOST,
HOST2,
MOCK_IMPORT_VALID_TV_CONFIG,
MOCK_INVALID_TV_CONFIG,
MOCK_SPEAKER_CONFIG,
MOCK_USER_VALID_TV_CONFIG,
MOCK_ZEROCONF_SERVICE_INFO,
NAME,
NAME2,
UNIQUE_ID,
VOLUME_STEP,
)
from tests.common import MockConfigEntry
async def test_user_flow_minimum_fields(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test user config flow with minimum fields."""
    # test form shows
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
    # A speaker config needs no access token, so submitting it should
    # create the entry directly.
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=MOCK_SPEAKER_CONFIG
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == NAME
    assert result["data"][CONF_NAME] == NAME
    assert result["data"][CONF_HOST] == HOST
    assert result["data"][CONF_DEVICE_CLASS] == DEVICE_CLASS_SPEAKER
async def test_user_flow_all_fields(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test user config flow with all fields."""
    # test form shows
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
    # TV config includes an access token, which must survive into the entry.
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=MOCK_USER_VALID_TV_CONFIG
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == NAME
    assert result["data"][CONF_NAME] == NAME
    assert result["data"][CONF_HOST] == HOST
    assert result["data"][CONF_DEVICE_CLASS] == DEVICE_CLASS_TV
    assert result["data"][CONF_ACCESS_TOKEN] == ACCESS_TOKEN
async def test_options_flow(hass: HomeAssistantType) -> None:
    """Test options config flow."""
    entry = MockConfigEntry(domain=DOMAIN, data=MOCK_SPEAKER_CONFIG)
    entry.add_to_hass(hass)
    # Entry starts without options; the options flow should add them.
    assert not entry.options
    result = await hass.config_entries.options.async_init(entry.entry_id, data=None)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "init"
    result = await hass.config_entries.options.async_configure(
        result["flow_id"], user_input={CONF_VOLUME_STEP: VOLUME_STEP}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # Options flows return an empty title by convention.
    assert result["title"] == ""
    assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP
async def test_user_host_already_configured(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test host is already configured during user setup."""
    entry = MockConfigEntry(
        domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, options={CONF_VOLUME_STEP: VOLUME_STEP}
    )
    entry.add_to_hass(hass)
    # Same host, different name: flow must reject on the host field.
    fail_entry = MOCK_SPEAKER_CONFIG.copy()
    fail_entry[CONF_NAME] = "newtestname"
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=fail_entry
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {CONF_HOST: "host_exists"}
async def test_user_name_already_configured(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test name is already configured during user setup."""
    entry = MockConfigEntry(
        domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, options={CONF_VOLUME_STEP: VOLUME_STEP}
    )
    entry.add_to_hass(hass)
    # Same name, different host: flow must reject on the name field.
    fail_entry = MOCK_SPEAKER_CONFIG.copy()
    fail_entry[CONF_HOST] = "0.0.0.0"
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=fail_entry
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {CONF_NAME: "name_exists"}
async def test_user_esn_already_exists(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test ESN is already configured with different host and name during user setup."""
    # Set up new entry
    MockConfigEntry(
        domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID
    ).add_to_hass(hass)
    # Set up new entry with same unique_id but different host and name
    fail_entry = MOCK_SPEAKER_CONFIG.copy()
    fail_entry[CONF_HOST] = HOST2
    fail_entry[CONF_NAME] = NAME2
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=fail_entry
    )
    # The unique_id (ESN) collision aborts the flow entirely rather than
    # showing a field error.
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_setup_with_diff_host_and_name"
async def test_user_error_on_could_not_connect(
    hass: HomeAssistantType, vizio_cant_connect: pytest.fixture
) -> None:
    """Test with could_not_connect during user_setup."""
    # The vizio_cant_connect fixture forces the connectivity check to fail,
    # so the form must come back with a base error.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "cant_connect"}
async def test_user_error_on_tv_needs_token(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test when config fails custom validation for non null access token when device_class = tv during user setup."""
    # MOCK_INVALID_TV_CONFIG is a TV config without an access token.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=MOCK_INVALID_TV_CONFIG
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["errors"] == {"base": "tv_needs_token"}
async def test_import_flow_minimum_fields(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test import config flow with minimum fields."""
    # Run the raw YAML-style dict through the voluptuous schema so defaults
    # (name, volume step) are filled in before import.
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=vol.Schema(VIZIO_SCHEMA)(
            {CONF_HOST: HOST, CONF_DEVICE_CLASS: DEVICE_CLASS_SPEAKER}
        ),
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == DEFAULT_NAME
    assert result["data"][CONF_NAME] == DEFAULT_NAME
    assert result["data"][CONF_HOST] == HOST
    assert result["data"][CONF_DEVICE_CLASS] == DEVICE_CLASS_SPEAKER
    assert result["data"][CONF_VOLUME_STEP] == DEFAULT_VOLUME_STEP
async def test_import_flow_all_fields(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test import config flow with all fields."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=vol.Schema(VIZIO_SCHEMA)(MOCK_IMPORT_VALID_TV_CONFIG),
    )
    # All explicitly provided fields must carry through to the entry data.
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == NAME
    assert result["data"][CONF_NAME] == NAME
    assert result["data"][CONF_HOST] == HOST
    assert result["data"][CONF_DEVICE_CLASS] == DEVICE_CLASS_TV
    assert result["data"][CONF_ACCESS_TOKEN] == ACCESS_TOKEN
    assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP
async def test_import_entity_already_configured(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test entity is already configured during import setup."""
    entry = MockConfigEntry(
        domain=DOMAIN,
        data=vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG),
        options={CONF_VOLUME_STEP: VOLUME_STEP},
    )
    entry.add_to_hass(hass)
    # Importing the same (unchanged) config again must abort, not duplicate.
    fail_entry = vol.Schema(VIZIO_SCHEMA)(MOCK_SPEAKER_CONFIG.copy())
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_IMPORT}, data=fail_entry
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_setup"
async def test_import_flow_update_options(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_update: pytest.fixture,
) -> None:
    """Test import config flow with updated options."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=vol.Schema(VIZIO_SCHEMA)(MOCK_IMPORT_VALID_TV_CONFIG),
    )
    await hass.async_block_till_done()
    assert result["result"].options == {CONF_VOLUME_STEP: VOLUME_STEP}
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    entry_id = result["result"].entry_id
    # Re-import the same device with a changed volume step: the flow should
    # abort with "updated_options" and patch the existing entry in place.
    updated_config = MOCK_IMPORT_VALID_TV_CONFIG.copy()
    updated_config[CONF_VOLUME_STEP] = VOLUME_STEP + 1
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=vol.Schema(VIZIO_SCHEMA)(updated_config),
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "updated_options"
    assert (
        hass.config_entries.async_get_entry(entry_id).options[CONF_VOLUME_STEP]
        == VOLUME_STEP + 1
    )
async def test_zeroconf_flow(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
    vizio_guess_device_type: pytest.fixture,
) -> None:
    """Test zeroconf config flow."""
    discovery_info = MOCK_ZEROCONF_SERVICE_INFO.copy()
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info
    )
    # Form should always show even if all required properties are discovered
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
    # Apply discovery updates to entry to mimic when user hits submit without changing
    # defaults which were set from discovery parameters
    user_input = result["data_schema"](discovery_info)
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=user_input
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == NAME
    assert result["data"][CONF_HOST] == HOST
    assert result["data"][CONF_NAME] == NAME
    assert result["data"][CONF_DEVICE_CLASS] == DEVICE_CLASS_SPEAKER
async def test_zeroconf_flow_already_configured(
    hass: HomeAssistantType,
    vizio_connect: pytest.fixture,
    vizio_bypass_setup: pytest.fixture,
) -> None:
    """Test entity is already configured during zeroconf setup."""
    entry = MockConfigEntry(
        domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, options={CONF_VOLUME_STEP: VOLUME_STEP}
    )
    entry.add_to_hass(hass)
    # Try rediscovering same device
    discovery_info = MOCK_ZEROCONF_SERVICE_INFO.copy()
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_ZEROCONF}, data=discovery_info
    )
    # Flow should abort because device is already setup
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_setup"
| 33.960452 | 118 | 0.719514 |
c3b1d5f29e7d4d47ed12b259e9cf3080d6a7c1c6 | 4,943 | py | Python | logging_loki/emitter.py | BlackSmith/python-logging-loki | 45fa383cd2dea38970514a2db7ac0bd4cb2ebb19 | [
"MIT"
] | null | null | null | logging_loki/emitter.py | BlackSmith/python-logging-loki | 45fa383cd2dea38970514a2db7ac0bd4cb2ebb19 | [
"MIT"
] | null | null | null | logging_loki/emitter.py | BlackSmith/python-logging-loki | 45fa383cd2dea38970514a2db7ac0bd4cb2ebb19 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import abc
import copy
import functools
import logging
import time
from logging.config import ConvertingDict
from typing import Any
from typing import Dict
from typing import Optional
from typing import Tuple
import requests
import rfc3339
from logging_loki import const
BasicAuth = Optional[Tuple[str, str]]
class LokiEmitter(abc.ABC):
    """Base Loki emitter class.

    Subclasses implement ``build_payload`` for a specific Loki API version;
    this base class owns the HTTP session, default tags and Prometheus-style
    label sanitization.
    """

    success_response_code = const.success_response_code
    level_tag = const.level_tag
    logger_tag = const.logger_tag
    label_allowed_chars = const.label_allowed_chars
    label_replace_with = const.label_replace_with
    session_class = requests.Session
    timeout = const.timeout

    def __init__(self, url: str, tags: Optional[dict] = None, auth: BasicAuth = None):
        """
        Create new Loki emitter.

        Arguments:
            url: Endpoint used to send log entries to Loki (e.g. `https://my-loki-instance/loki/api/v1/push`).
            tags: Default tags added to every log record.
            auth: Optional tuple with username and password for basic HTTP authentication.

        """
        #: Tags that will be added to all records handled by this handler.
        self.tags = tags or {}
        #: Loki JSON push endpoint (e.g `http://127.0.0.1/loki/api/v1/push`)
        self.url = url
        #: Optional tuple with username and password for basic authentication.
        self.auth = auth

        self._session = None
        # Bug fix (flake8-bugbear B019): the original decorated the bound
        # method `format_label` with functools.lru_cache at class level.
        # That cache keys on `self`, so it kept a strong reference to every
        # emitter for the lifetime of the process, leaking instances.
        # Install an equivalently-sized per-instance cache instead; the
        # public call signature `emitter.format_label(label)` is unchanged.
        self.format_label = functools.lru_cache(const.format_label_lru_size)(
            self._format_label
        )

    def __call__(self, record: logging.LogRecord, line: str):
        """Send log record to Loki.

        Raises:
            ValueError: if Loki answers with an unexpected HTTP status code.
        """
        payload = self.build_payload(record, line)
        resp = self.session.post(self.url, json=payload, timeout=self.timeout)
        if resp.status_code != self.success_response_code:
            raise ValueError("Unexpected Loki API response status code: {0}".format(resp.status_code))

    @abc.abstractmethod
    def build_payload(self, record: logging.LogRecord, line) -> dict:
        """Build JSON payload with a log entry."""
        raise NotImplementedError  # pragma: no cover

    @property
    def session(self) -> requests.Session:
        """Return the HTTP session, creating and configuring it lazily."""
        if self._session is None:
            self._session = self.session_class()
            self._session.auth = self.auth or None
        return self._session

    def close(self):
        """Close HTTP session (a new one is created on next use)."""
        if self._session is not None:
            self._session.close()
            self._session = None

    def format_label(self, label: str) -> str:
        """
        Build label to match prometheus format.

        `Label format <https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels>`_
        """
        # Shadowed per instance by the LRU-cached wrapper installed in
        # __init__; kept so the attribute also resolves on the class itself.
        return self._format_label(label)

    def _format_label(self, label: str) -> str:
        """Uncached implementation behind `format_label`."""
        # First map disallowed separators (e.g. "-", ".") onto allowed ones,
        # then drop any character that is still not label-safe.
        for char_from, char_to in self.label_replace_with:
            label = label.replace(char_from, char_to)
        return "".join(char for char in label if char in self.label_allowed_chars)

    def build_tags(self, record: logging.LogRecord) -> Dict[str, Any]:
        """Return tags that must be send to Loki with a log record."""
        # ConvertingDict (from logging.config) is materialized into a plain
        # dict before deep-copying so per-record mutation stays local.
        tags = dict(self.tags) if isinstance(self.tags, ConvertingDict) else self.tags
        tags = copy.deepcopy(tags)
        tags[self.level_tag] = record.levelname.lower()
        tags[self.logger_tag] = record.name

        extra_tags = getattr(record, "tags", {})
        if not isinstance(extra_tags, dict):
            return tags

        # Merge caller-supplied tags, sanitizing each key; keys that reduce
        # to an empty string after sanitization are silently dropped.
        for tag_name, tag_value in extra_tags.items():
            cleared_name = self.format_label(tag_name)
            if cleared_name:
                tags[cleared_name] = tag_value

        return tags
class LokiEmitterV0(LokiEmitter):
    """Emitter for Loki < 0.4.0."""

    def build_payload(self, record: logging.LogRecord, line) -> dict:
        """Build JSON payload with a log entry."""
        labels = self.build_labels(record)
        # Legacy API expects an RFC 3339 timestamp with microsecond precision.
        ts = rfc3339.format_microsecond(record.created)
        stream = {
            "labels": labels,
            "entries": [{"ts": ts, "line": line}],
        }
        return {"streams": [stream]}

    def build_labels(self, record: logging.LogRecord) -> str:
        """Return Loki labels string."""
        labels = []
        for label_name, label_value in self.build_tags(record).items():
            cleared_name = self.format_label(str(label_name))
            # Escape embedded double quotes so the value stays a valid
            # quoted string inside the label selector.
            cleared_value = str(label_value).replace('"', r"\"")
            labels.append('{0}="{1}"'.format(cleared_name, cleared_value))
        # Loki selector syntax: {key="value",key2="value2"}
        return "{{{0}}}".format(",".join(labels))
class LokiEmitterV1(LokiEmitter):
    """Emitter for Loki >= 0.4.0."""

    def build_payload(self, record: logging.LogRecord, line) -> dict:
        """Build JSON payload with a log entry."""
        # Loki v1 expects timestamps as stringified integer nanoseconds.
        ns = 1e9
        timestamp_ns = str(int(time.time() * ns))
        return {
            "streams": [
                {
                    "stream": self.build_tags(record),
                    "values": [[timestamp_ns, line]],
                }
            ]
        }
| 34.326389 | 110 | 0.630791 |
f9485a21eaf87b887c01d593396b4861f80225a8 | 1,494 | py | Python | 05.Popular reading blog rankings and cache speed/mysite/views.py | ReverseScale/DjangoProject | b82fffdef5084f616f9de86516254d6398ba08e8 | [
"MIT"
] | 1 | 2019-02-20T02:33:25.000Z | 2019-02-20T02:33:25.000Z | 05.Popular reading blog rankings and cache speed/mysite/views.py | ReverseScale/DjangoProject | b82fffdef5084f616f9de86516254d6398ba08e8 | [
"MIT"
] | null | null | null | 05.Popular reading blog rankings and cache speed/mysite/views.py | ReverseScale/DjangoProject | b82fffdef5084f616f9de86516254d6398ba08e8 | [
"MIT"
] | null | null | null | import datetime
from django.shortcuts import render_to_response
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from django.db.models import Sum
from django.core.cache import cache
from read_statistics.utils import get_seven_days_read_data, get_today_hot_data, get_yesterday_hot_data
from blog.models import Blog
def get_7_days_hot_blogs():
    """Return the 7 most-read blogs over the last seven days."""
    today = timezone.now().date()
    week_ago = today - datetime.timedelta(days=7)
    # Aggregate read counts per blog within [week_ago, today), most read first.
    hot_blogs = (
        Blog.objects
        .filter(read_details__date__lt=today, read_details__date__gte=week_ago)
        .values('id', 'title')
        .annotate(read_num_sum=Sum('read_details__read_num'))
        .order_by('-read_num_sum')
    )
    return hot_blogs[:7]
def home(request):
    """Render the home page with 7-day read statistics and hot-blog lists."""
    blog_content_type = ContentType.objects.get_for_model(Blog)
    dates, read_nums = get_seven_days_read_data(blog_content_type)

    # 获取7天热门博客的缓存数据
    # (Fetch the 7-day hot-blog list from cache; recompute and cache for
    # one hour on a miss to avoid the aggregate query on every request.)
    hot_blogs_for_7_days = cache.get('hot_blogs_for_7_days')
    if hot_blogs_for_7_days is None:
        hot_blogs_for_7_days = get_7_days_hot_blogs()
        cache.set('hot_blogs_for_7_days', hot_blogs_for_7_days, 3600)

    context = {}
    context['dates'] = dates
    context['read_nums'] = read_nums
    context['today_hot_data'] = get_today_hot_data(blog_content_type)
    context['yesterday_hot_data'] = get_yesterday_hot_data(blog_content_type)
    context['hot_blogs_for_7_days'] = hot_blogs_for_7_days
    return render_to_response('home.html', context)
eb7872548f809875e9fdcfc9dbef5e904d71c431 | 69,229 | py | Python | meraki_v1/api/networks.py | zabrewer/dashboard-api-python | bc21b6852e3167dcdf79585928a963efebb9d0ee | [
"MIT"
] | null | null | null | meraki_v1/api/networks.py | zabrewer/dashboard-api-python | bc21b6852e3167dcdf79585928a963efebb9d0ee | [
"MIT"
] | null | null | null | meraki_v1/api/networks.py | zabrewer/dashboard-api-python | bc21b6852e3167dcdf79585928a963efebb9d0ee | [
"MIT"
] | null | null | null | class Networks(object):
def __init__(self, session):
    # `session` is the shared RestSession used by all endpoint wrappers to
    # issue authenticated HTTP calls to the Meraki Dashboard API.
    super(Networks, self).__init__()
    self._session = session
def getNetwork(self, networkId: str):
    """
    **Return a network**
    https://developer.cisco.com/docs/meraki-api-v1/#!get-network

    - networkId (string)
    """
    metadata = {'tags': ['networks', 'configure'], 'operation': 'getNetwork'}
    return self._session.get(metadata, f'/networks/{networkId}')
def updateNetwork(self, networkId: str, **kwargs):
    """
    **Update a network**
    https://developer.cisco.com/docs/meraki-api-v1/#!update-network

    - networkId (string)
    - name (string): The name of the network
    - timeZone (string): The timezone of the network. For a list of allowed timezones, please see the 'TZ' column in the table in <a target='_blank' href='https://en.wikipedia.org/wiki/List_of_tz_database_time_zones'>this article.</a>
    - tags (string): A space-separated list of tags to be applied to the network
    - enrollmentString (string): A unique identifier which can be used for device enrollment or easy access through the Meraki SM Registration page or the Self Service Portal. Please note that changing this field may cause existing bookmarks to break.
    """
    # Fold named arguments into kwargs; the stray locals() keys (self,
    # networkId, kwargs) are filtered out by body_params below.
    kwargs.update(locals())

    metadata = {
        'tags': ['networks', 'configure'],
        'operation': 'updateNetwork',
    }
    resource = f'/networks/{networkId}'

    body_params = ['name', 'timeZone', 'tags', 'enrollmentString']
    payload = {k: v for (k, v) in kwargs.items() if k in body_params}

    return self._session.put(metadata, resource, payload)
def deleteNetwork(self, networkId: str):
    """
    **Delete a network**
    https://developer.cisco.com/docs/meraki-api-v1/#!delete-network

    - networkId (string)
    """
    metadata = {'tags': ['networks', 'configure'], 'operation': 'deleteNetwork'}
    return self._session.delete(metadata, f'/networks/{networkId}')
def getNetworkAlertSettings(self, networkId: str):
    """
    **Return the alert configuration for this network**
    https://developer.cisco.com/docs/meraki-api-v1/#!get-network-alert-settings

    - networkId (string)
    """
    metadata = {
        'tags': ['networks', 'configure', 'alertSettings'],
        'operation': 'getNetworkAlertSettings',
    }
    return self._session.get(metadata, f'/networks/{networkId}/alertSettings')
def updateNetworkAlertSettings(self, networkId: str, **kwargs):
    """
    **Update the alert configuration for this network**
    https://developer.cisco.com/docs/meraki-api-v1/#!update-network-alert-settings

    - networkId (string)
    - defaultDestinations (object): The network-wide destinations for all alerts on the network.
    - alerts (array): Alert-specific configuration for each type. Only alerts that pertain to the network can be updated.
    """
    # Fold named arguments into kwargs; extra locals() keys are filtered
    # out by body_params below.
    kwargs.update(locals())

    metadata = {
        'tags': ['networks', 'configure', 'alertSettings'],
        'operation': 'updateNetworkAlertSettings',
    }
    resource = f'/networks/{networkId}/alertSettings'

    body_params = ['defaultDestinations', 'alerts']
    payload = {k: v for (k, v) in kwargs.items() if k in body_params}

    return self._session.put(metadata, resource, payload)
def bindNetwork(self, networkId: str, configTemplateId: str, **kwargs):
    """
    **Bind a network to a template.**
    https://developer.cisco.com/docs/meraki-api-v1/#!bind-network

    - networkId (string)
    - configTemplateId (string): The ID of the template to which the network should be bound.
    - autoBind (boolean): Optional boolean indicating whether the network's switches should automatically bind to profiles of the same model. Defaults to false if left unspecified. This option only affects switch networks and switch templates. Auto-bind is not valid unless the switch template has at least one profile and has at most one profile per switch model.
    """
    # Fold named arguments into kwargs; extra locals() keys are filtered
    # out by body_params below.
    kwargs.update(locals())

    metadata = {
        'tags': ['networks', 'configure'],
        'operation': 'bindNetwork',
    }
    resource = f'/networks/{networkId}/bind'

    body_params = ['configTemplateId', 'autoBind']
    payload = {k: v for (k, v) in kwargs.items() if k in body_params}

    return self._session.post(metadata, resource, payload)
def getNetworkBluetoothClients(self, networkId: str, total_pages=1, direction='next', **kwargs):
    """
    **List the Bluetooth clients seen by APs in this network**
    https://developer.cisco.com/docs/meraki-api-v1/#!get-network-bluetooth-clients

    - networkId (string)
    - total_pages (integer or string): total number of pages to retrieve, -1 or "all" for all pages
    - direction (string): direction to paginate, either "next" (default) or "prev" page
    - t0 (string): The beginning of the timespan for the data. The maximum lookback period is 7 days from today.
    - timespan (number): The timespan for which the information will be fetched. If specifying timespan, do not specify parameter t0. The value must be in seconds and be less than or equal to 7 days. The default is 1 day.
    - perPage (integer): The number of entries per page returned. Acceptable range is 5 - 1000. Default is 10.
    - startingAfter (string): A token used by the server to indicate the start of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
    - endingBefore (string): A token used by the server to indicate the end of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
    - includeConnectivityHistory (boolean): Include the connectivity history for this client
    """
    # Fold named arguments into kwargs; extra locals() keys (including
    # total_pages/direction, which are pagination controls rather than API
    # parameters) are filtered out by query_params below.
    kwargs.update(locals())

    metadata = {
        'tags': ['networks', 'monitor', 'bluetoothClients'],
        'operation': 'getNetworkBluetoothClients',
    }
    resource = f'/networks/{networkId}/bluetoothClients'

    query_params = ['t0', 'timespan', 'perPage', 'startingAfter', 'endingBefore', 'includeConnectivityHistory']
    params = {k: v for (k, v) in kwargs.items() if k in query_params}

    return self._session.get_pages(metadata, resource, params, total_pages, direction)
def getNetworkBluetoothClient(self, networkId: str, bluetoothClientId: str, **kwargs):
    """
    **Return a Bluetooth client. Bluetooth clients can be identified by their ID or their MAC.**
    https://developer.cisco.com/docs/meraki-api-v1/#!get-network-bluetooth-client

    - networkId (string)
    - bluetoothClientId (string)
    - includeConnectivityHistory (boolean): Include the connectivity history for this client
    - connectivityHistoryTimespan (integer): The timespan, in seconds, for the connectivityHistory data. By default 1 day, 86400, will be used.
    """
    # Fold named arguments into kwargs; extra locals() keys are filtered
    # out by query_params below.
    kwargs.update(locals())

    metadata = {
        'tags': ['networks', 'monitor', 'bluetoothClients'],
        'operation': 'getNetworkBluetoothClient',
    }
    resource = f'/networks/{networkId}/bluetoothClients/{bluetoothClientId}'

    query_params = ['includeConnectivityHistory', 'connectivityHistoryTimespan']
    params = {k: v for (k, v) in kwargs.items() if k in query_params}

    return self._session.get(metadata, resource, params)
def getNetworkClients(self, networkId: str, total_pages=1, direction='next', **kwargs):
    """
    **List the clients that have used this network in the timespan**
    https://developer.cisco.com/docs/meraki-api-v1/#!get-network-clients

    - networkId (string)
    - total_pages (integer or string): total number of pages to retrieve, -1 or "all" for all pages
    - direction (string): direction to paginate, either "next" (default) or "prev" page
    - t0 (string): The beginning of the timespan for the data. The maximum lookback period is 31 days from today.
    - timespan (number): The timespan for which the information will be fetched. If specifying timespan, do not specify parameter t0. The value must be in seconds and be less than or equal to 31 days. The default is 1 day.
    - perPage (integer): The number of entries per page returned. Acceptable range is 3 - 1000. Default is 10.
    - startingAfter (string): A token used by the server to indicate the start of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
    - endingBefore (string): A token used by the server to indicate the end of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
    """
    # Fold named arguments into kwargs; extra locals() keys are filtered
    # out by query_params below.
    kwargs.update(locals())

    metadata = {
        'tags': ['networks', 'monitor', 'clients'],
        'operation': 'getNetworkClients',
    }
    resource = f'/networks/{networkId}/clients'

    query_params = ['t0', 'timespan', 'perPage', 'startingAfter', 'endingBefore']
    params = {k: v for (k, v) in kwargs.items() if k in query_params}

    return self._session.get_pages(metadata, resource, params, total_pages, direction)
def provisionNetworkClients(self, networkId: str, clients: list, devicePolicy: str, **kwargs):
    """
    **Provisions a client with a name and policy. Clients can be provisioned before they associate to the network.**
    https://developer.cisco.com/docs/meraki-api-v1/#!provision-network-clients

    - networkId (string)
    - clients (array): The array of clients to provision
    - devicePolicy (string): The policy to apply to the specified client. Can be 'Group policy', 'Whitelisted', 'Blocked', 'Per connection' or 'Normal'. Required.
    - groupPolicyId (string): The ID of the desired group policy to apply to the client. Required if 'devicePolicy' is set to "Group policy". Otherwise this is ignored.
    - policiesBySecurityAppliance (object): An object, describing what the policy-connection association is for the security appliance. (Only relevant if the security appliance is actually within the network)
    - policiesBySsid (object): An object, describing the policy-connection associations for each active SSID within the network. Keys should be the number of enabled SSIDs, mapping to an object describing the client's policy
    """
    # Fold named arguments into kwargs; extra locals() keys are filtered
    # out by body_params below.
    kwargs.update(locals())

    # Client-side validation of the enum before hitting the API.
    # NOTE(review): `assert` is stripped under `python -O`; the API would
    # still reject an invalid value server-side in that case.
    if 'devicePolicy' in kwargs:
        options = ['Group policy', 'Whitelisted', 'Blocked', 'Per connection', 'Normal']
        assert kwargs['devicePolicy'] in options, f'''"devicePolicy" cannot be "{kwargs['devicePolicy']}", & must be set to one of: {options}'''

    metadata = {
        'tags': ['networks', 'configure', 'clients'],
        'operation': 'provisionNetworkClients',
    }
    resource = f'/networks/{networkId}/clients/provision'

    body_params = ['clients', 'devicePolicy', 'groupPolicyId', 'policiesBySecurityAppliance', 'policiesBySsid']
    payload = {k: v for (k, v) in kwargs.items() if k in body_params}

    return self._session.post(metadata, resource, payload)
def getNetworkClient(self, networkId: str, clientId: str):
    """
    **Return the client associated with the given identifier. Clients can be identified by a client key or either the MAC or IP depending on whether the network uses Track-by-IP.**
    https://developer.cisco.com/docs/meraki-api-v1/#!get-network-client

    - networkId (string)
    - clientId (string)
    """
    metadata = {
        'tags': ['networks', 'monitor', 'clients'],
        'operation': 'getNetworkClient',
    }
    return self._session.get(metadata, f'/networks/{networkId}/clients/{clientId}')
def getNetworkClientPolicy(self, networkId: str, clientId: str):
    """
    **Return the policy assigned to a client on the network. Clients can be identified by a client key or either the MAC or IP depending on whether the network uses Track-by-IP.**
    https://developer.cisco.com/docs/meraki-api-v1/#!get-network-client-policy

    - networkId (string)
    - clientId (string)
    """
    metadata = {
        'tags': ['networks', 'configure', 'clients', 'policy'],
        'operation': 'getNetworkClientPolicy',
    }
    return self._session.get(
        metadata, f'/networks/{networkId}/clients/{clientId}/policy'
    )
def updateNetworkClientPolicy(self, networkId: str, clientId: str, devicePolicy: str, **kwargs):
"""
**Update the policy assigned to a client on the network. Clients can be identified by a client key or either the MAC or IP depending on whether the network uses Track-by-IP.**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-client-policy
- networkId (string)
- clientId (string)
- devicePolicy (string): The policy to assign. Can be 'Whitelisted', 'Blocked', 'Normal' or 'Group policy'. Required.
- groupPolicyId (string): [optional] If 'devicePolicy' is set to 'Group policy' this param is used to specify the group policy ID.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'clients', 'policy'],
'operation': 'updateNetworkClientPolicy',
}
resource = f'/networks/{networkId}/clients/{clientId}/policy'
body_params = ['devicePolicy', 'groupPolicyId']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkClientSplashAuthorizationStatus(self, networkId: str, clientId: str):
"""
**Return the splash authorization for a client, for each SSID they've associated with through splash. Only enabled SSIDs with Click-through splash enabled will be included. Clients can be identified by a client key or either the MAC or IP depending on whether the network uses Track-by-IP.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-client-splash-authorization-status
- networkId (string)
- clientId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'clients', 'splashAuthorizationStatus'],
'operation': 'getNetworkClientSplashAuthorizationStatus',
}
resource = f'/networks/{networkId}/clients/{clientId}/splashAuthorizationStatus'
return self._session.get(metadata, resource)
def updateNetworkClientSplashAuthorizationStatus(self, networkId: str, clientId: str, ssids: dict):
"""
**Update a client's splash authorization. Clients can be identified by a client key or either the MAC or IP depending on whether the network uses Track-by-IP.**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-client-splash-authorization-status
- networkId (string)
- clientId (string)
- ssids (object): The target SSIDs. Each SSID must be enabled and must have Click-through splash enabled. For each SSID where isAuthorized is true, the expiration time will automatically be set according to the SSID's splash frequency. Not all networks support configuring all SSIDs
"""
kwargs = locals()
metadata = {
'tags': ['networks', 'configure', 'clients', 'splashAuthorizationStatus'],
'operation': 'updateNetworkClientSplashAuthorizationStatus',
}
resource = f'/networks/{networkId}/clients/{clientId}/splashAuthorizationStatus'
body_params = ['ssids']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkClientTrafficHistory(self, networkId: str, clientId: str, total_pages=1, direction='next', **kwargs):
"""
**Return the client's network traffic data over time. Usage data is in kilobytes. This endpoint requires detailed traffic analysis to be enabled on the Network-wide > General page. Clients can be identified by a client key or either the MAC or IP depending on whether the network uses Track-by-IP.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-client-traffic-history
- networkId (string)
- clientId (string)
- total_pages (integer or string): total number of pages to retrieve, -1 or "all" for all pages
- direction (string): direction to paginate, either "next" (default) or "prev" page
- perPage (integer): The number of entries per page returned. Acceptable range is 3 - 1000.
- startingAfter (string): A token used by the server to indicate the start of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
- endingBefore (string): A token used by the server to indicate the end of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'monitor', 'clients', 'trafficHistory'],
'operation': 'getNetworkClientTrafficHistory',
}
resource = f'/networks/{networkId}/clients/{clientId}/trafficHistory'
query_params = ['perPage', 'startingAfter', 'endingBefore']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get_pages(metadata, resource, params, total_pages, direction)
def getNetworkClientUsageHistory(self, networkId: str, clientId: str):
"""
**Return the client's daily usage history. Usage data is in kilobytes. Clients can be identified by a client key or either the MAC or IP depending on whether the network uses Track-by-IP.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-client-usage-history
- networkId (string)
- clientId (string)
"""
metadata = {
'tags': ['networks', 'monitor', 'clients', 'usageHistory'],
'operation': 'getNetworkClientUsageHistory',
}
resource = f'/networks/{networkId}/clients/{clientId}/usageHistory'
return self._session.get(metadata, resource)
def getNetworkDevices(self, networkId: str):
"""
**List the devices in a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-devices
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'devices'],
'operation': 'getNetworkDevices',
}
resource = f'/networks/{networkId}/devices'
return self._session.get(metadata, resource)
def claimNetworkDevices(self, networkId: str, serials: list):
"""
**Claim devices into a network**
https://developer.cisco.com/docs/meraki-api-v1/#!claim-network-devices
- networkId (string)
- serials (array): A list of serials of devices to claim
"""
kwargs = locals()
metadata = {
'tags': ['networks', 'configure', 'devices'],
'operation': 'claimNetworkDevices',
}
resource = f'/networks/{networkId}/devices/claim'
body_params = ['serials']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def removeNetworkDevices(self, networkId: str, serial: str):
"""
**Remove a single device**
https://developer.cisco.com/docs/meraki-api-v1/#!remove-network-devices
- networkId (string)
- serial (string): The serial of a device
"""
kwargs = locals()
metadata = {
'tags': ['networks', 'configure', 'devices'],
'operation': 'removeNetworkDevices',
}
resource = f'/networks/{networkId}/devices/remove'
body_params = ['serial']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def getNetworkEvents(self, networkId: str, total_pages=1, direction='prev', **kwargs):
"""
**List the events for the network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-events
- networkId (string)
- total_pages (integer or string): total number of pages to retrieve, -1 or "all" for all pages
- direction (string): direction to paginate, either "prev" (default) or "next" page
- productType (string): The product type to fetch events for. This parameter is required for networks with multiple device types. Valid types are wireless, appliance, switch, systemsManager, camera, and cellularGateway
- includedEventTypes (array): A list of event types. The returned events will be filtered to only include events with these types.
- excludedEventTypes (array): A list of event types. The returned events will be filtered to exclude events with these types.
- deviceMac (string): The MAC address of the Meraki device which the list of events will be filtered with
- deviceSerial (string): The serial of the Meraki device which the list of events will be filtered with
- deviceName (string): The name of the Meraki device which the list of events will be filtered with
- clientIp (string): The IP of the client which the list of events will be filtered with. Only supported for track-by-IP networks.
- clientMac (string): The MAC address of the client which the list of events will be filtered with. Only supported for track-by-MAC networks.
- clientName (string): The name, or partial name, of the client which the list of events will be filtered with
- smDeviceMac (string): The MAC address of the Systems Manager device which the list of events will be filtered with
- smDeviceName (string): The name of the Systems Manager device which the list of events will be filtered with
- perPage (integer): The number of entries per page returned. Acceptable range is 3 - 1000. Default is 10.
- startingAfter (string): A token used by the server to indicate the start of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
- endingBefore (string): A token used by the server to indicate the end of the page. Often this is a timestamp or an ID but it is not limited to those. This parameter should not be defined by client applications. The link for the first, last, prev, or next page in the HTTP Link header should define it.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'monitor', 'events'],
'operation': 'getNetworkEvents',
}
resource = f'/networks/{networkId}/events'
query_params = ['productType', 'deviceMac', 'deviceSerial', 'deviceName', 'clientIp', 'clientMac', 'clientName', 'smDeviceMac', 'smDeviceName', 'perPage', 'startingAfter', 'endingBefore']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
array_params = ['includedEventTypes', 'excludedEventTypes']
for (k, v) in kwargs.items():
if k in array_params:
params[f'{k}[]'] = kwargs[f'{k}']
return self._session.get_pages(metadata, resource, params, total_pages, direction)
def getNetworkEventsEventTypes(self, networkId: str):
"""
**List the event type to human-readable description**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-events-event-types
- networkId (string)
"""
metadata = {
'tags': ['networks', 'monitor', 'events', 'eventTypes'],
'operation': 'getNetworkEventsEventTypes',
}
resource = f'/networks/{networkId}/events/eventTypes'
return self._session.get(metadata, resource)
def updateNetworkFirewalledService(self, networkId: str, service: str, access: str, **kwargs):
"""
**Updates the accessibility settings for the given service ('ICMP', 'web', or 'SNMP')**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-firewalled-service
- networkId (string)
- service (string)
- access (string): A string indicating the rule for which IPs are allowed to use the specified service. Acceptable values are "blocked" (no remote IPs can access the service), "restricted" (only whitelisted IPs can access the service), and "unrestriced" (any remote IP can access the service). This field is required
- allowedIps (array): An array of whitelisted IPs that can access the service. This field is required if "access" is set to "restricted". Otherwise this field is ignored
"""
kwargs.update(locals())
if 'access' in kwargs:
options = ['blocked', 'restricted', 'unrestricted']
assert kwargs['access'] in options, f'''"access" cannot be "{kwargs['access']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'configure', 'firewalledServices'],
'operation': 'updateNetworkFirewalledService',
}
resource = f'/networks/{networkId}/firewalledServices/{service}'
body_params = ['access', 'allowedIps']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkFloorPlans(self, networkId: str):
"""
**List the floor plans that belong to your network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-floor-plans
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'floorPlans'],
'operation': 'getNetworkFloorPlans',
}
resource = f'/networks/{networkId}/floorPlans'
return self._session.get(metadata, resource)
def createNetworkFloorPlan(self, networkId: str, name: str, imageContents: str, **kwargs):
"""
**Upload a floor plan**
https://developer.cisco.com/docs/meraki-api-v1/#!create-network-floor-plan
- networkId (string)
- name (string): The name of your floor plan.
- imageContents (string): The file contents (a base 64 encoded string) of your image. Supported formats are PNG, GIF, and JPG. Note that all images are saved as PNG files, regardless of the format they are uploaded in.
- center (object): The longitude and latitude of the center of your floor plan. The 'center' or two adjacent corners (e.g. 'topLeftCorner' and 'bottomLeftCorner') must be specified. If 'center' is specified, the floor plan is placed over that point with no rotation. If two adjacent corners are specified, the floor plan is rotated to line up with the two specified points. The aspect ratio of the floor plan's image is preserved regardless of which corners/center are specified. (This means if that more than two corners are specified, only two corners may be used to preserve the floor plan's aspect ratio.). No two points can have the same latitude, longitude pair.
- bottomLeftCorner (object): The longitude and latitude of the bottom left corner of your floor plan.
- bottomRightCorner (object): The longitude and latitude of the bottom right corner of your floor plan.
- topLeftCorner (object): The longitude and latitude of the top left corner of your floor plan.
- topRightCorner (object): The longitude and latitude of the top right corner of your floor plan.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'floorPlans'],
'operation': 'createNetworkFloorPlan',
}
resource = f'/networks/{networkId}/floorPlans'
body_params = ['name', 'center', 'bottomLeftCorner', 'bottomRightCorner', 'topLeftCorner', 'topRightCorner', 'imageContents']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def getNetworkFloorPlan(self, networkId: str, floorPlanId: str):
"""
**Find a floor plan by ID**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-floor-plan
- networkId (string)
- floorPlanId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'floorPlans'],
'operation': 'getNetworkFloorPlan',
}
resource = f'/networks/{networkId}/floorPlans/{floorPlanId}'
return self._session.get(metadata, resource)
def updateNetworkFloorPlan(self, networkId: str, floorPlanId: str, **kwargs):
"""
**Update a floor plan's geolocation and other meta data**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-floor-plan
- networkId (string)
- floorPlanId (string)
- name (string): The name of your floor plan.
- center (object): The longitude and latitude of the center of your floor plan. If you want to change the geolocation data of your floor plan, either the 'center' or two adjacent corners (e.g. 'topLeftCorner' and 'bottomLeftCorner') must be specified. If 'center' is specified, the floor plan is placed over that point with no rotation. If two adjacent corners are specified, the floor plan is rotated to line up with the two specified points. The aspect ratio of the floor plan's image is preserved regardless of which corners/center are specified. (This means if that more than two corners are specified, only two corners may be used to preserve the floor plan's aspect ratio.). No two points can have the same latitude, longitude pair.
- bottomLeftCorner (object): The longitude and latitude of the bottom left corner of your floor plan.
- bottomRightCorner (object): The longitude and latitude of the bottom right corner of your floor plan.
- topLeftCorner (object): The longitude and latitude of the top left corner of your floor plan.
- topRightCorner (object): The longitude and latitude of the top right corner of your floor plan.
- imageContents (string): The file contents (a base 64 encoded string) of your new image. Supported formats are PNG, GIF, and JPG. Note that all images are saved as PNG files, regardless of the format they are uploaded in. If you upload a new image, and you do NOT specify any new geolocation fields ('center, 'topLeftCorner', etc), the floor plan will be recentered with no rotation in order to maintain the aspect ratio of your new image.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'floorPlans'],
'operation': 'updateNetworkFloorPlan',
}
resource = f'/networks/{networkId}/floorPlans/{floorPlanId}'
body_params = ['name', 'center', 'bottomLeftCorner', 'bottomRightCorner', 'topLeftCorner', 'topRightCorner', 'imageContents']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def deleteNetworkFloorPlan(self, networkId: str, floorPlanId: str):
"""
**Destroy a floor plan**
https://developer.cisco.com/docs/meraki-api-v1/#!delete-network-floor-plan
- networkId (string)
- floorPlanId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'floorPlans'],
'operation': 'deleteNetworkFloorPlan',
}
resource = f'/networks/{networkId}/floorPlans/{floorPlanId}'
return self._session.delete(metadata, resource)
def getNetworkGroupPolicies(self, networkId: str):
"""
**List the group policies in a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-group-policies
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'groupPolicies'],
'operation': 'getNetworkGroupPolicies',
}
resource = f'/networks/{networkId}/groupPolicies'
return self._session.get(metadata, resource)
def createNetworkGroupPolicy(self, networkId: str, name: str, **kwargs):
"""
**Create a group policy**
https://developer.cisco.com/docs/meraki-api-v1/#!create-network-group-policy
- networkId (string)
- name (string): The name for your group policy. Required.
- scheduling (object): The schedule for the group policy. Schedules are applied to days of the week.
- bandwidth (object): The bandwidth settings for clients bound to your group policy.
- firewallAndTrafficShaping (object): The firewall and traffic shaping rules and settings for your policy.
- contentFiltering (object): The content filtering settings for your group policy
- splashAuthSettings (string): Whether clients bound to your policy will bypass splash authorization or behave according to the network's rules. Can be one of 'network default' or 'bypass'. Only available if your network has a wireless configuration.
- vlanTagging (object): The VLAN tagging settings for your group policy. Only available if your network has a wireless configuration.
- bonjourForwarding (object): The Bonjour settings for your group policy. Only valid if your network has a wireless configuration.
"""
kwargs.update(locals())
if 'splashAuthSettings' in kwargs:
options = ['network default', 'bypass']
assert kwargs['splashAuthSettings'] in options, f'''"splashAuthSettings" cannot be "{kwargs['splashAuthSettings']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'configure', 'groupPolicies'],
'operation': 'createNetworkGroupPolicy',
}
resource = f'/networks/{networkId}/groupPolicies'
body_params = ['name', 'scheduling', 'bandwidth', 'firewallAndTrafficShaping', 'contentFiltering', 'splashAuthSettings', 'vlanTagging', 'bonjourForwarding']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def getNetworkGroupPolicy(self, networkId: str, groupPolicyId: str):
"""
**Display a group policy**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-group-policy
- networkId (string)
- groupPolicyId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'groupPolicies'],
'operation': 'getNetworkGroupPolicy',
}
resource = f'/networks/{networkId}/groupPolicies/{groupPolicyId}'
return self._session.get(metadata, resource)
def updateNetworkGroupPolicy(self, networkId: str, groupPolicyId: str, **kwargs):
"""
**Update a group policy**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-group-policy
- networkId (string)
- groupPolicyId (string)
- name (string): The name for your group policy.
- scheduling (object): The schedule for the group policy. Schedules are applied to days of the week.
- bandwidth (object): The bandwidth settings for clients bound to your group policy.
- firewallAndTrafficShaping (object): The firewall and traffic shaping rules and settings for your policy.
- contentFiltering (object): The content filtering settings for your group policy
- splashAuthSettings (string): Whether clients bound to your policy will bypass splash authorization or behave according to the network's rules. Can be one of 'network default' or 'bypass'. Only available if your network has a wireless configuration.
- vlanTagging (object): The VLAN tagging settings for your group policy. Only available if your network has a wireless configuration.
- bonjourForwarding (object): The Bonjour settings for your group policy. Only valid if your network has a wireless configuration.
"""
kwargs.update(locals())
if 'splashAuthSettings' in kwargs:
options = ['network default', 'bypass']
assert kwargs['splashAuthSettings'] in options, f'''"splashAuthSettings" cannot be "{kwargs['splashAuthSettings']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'configure', 'groupPolicies'],
'operation': 'updateNetworkGroupPolicy',
}
resource = f'/networks/{networkId}/groupPolicies/{groupPolicyId}'
body_params = ['name', 'scheduling', 'bandwidth', 'firewallAndTrafficShaping', 'contentFiltering', 'splashAuthSettings', 'vlanTagging', 'bonjourForwarding']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def deleteNetworkGroupPolicy(self, networkId: str, groupPolicyId: str):
"""
**Delete a group policy**
https://developer.cisco.com/docs/meraki-api-v1/#!delete-network-group-policy
- networkId (string)
- groupPolicyId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'groupPolicies'],
'operation': 'deleteNetworkGroupPolicy',
}
resource = f'/networks/{networkId}/groupPolicies/{groupPolicyId}'
return self._session.delete(metadata, resource)
def getNetworkHttpServers(self, networkId: str):
"""
**List the HTTP servers for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-http-servers
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'httpServers'],
'operation': 'getNetworkHttpServers',
}
resource = f'/networks/{networkId}/httpServers'
return self._session.get(metadata, resource)
def createNetworkHttpServer(self, networkId: str, name: str, url: str, **kwargs):
"""
**Add an HTTP server to a network**
https://developer.cisco.com/docs/meraki-api-v1/#!create-network-http-server
- networkId (string)
- name (string): A name for easy reference to the HTTP server
- url (string): The URL of the HTTP server
- sharedSecret (string): A shared secret that will be included in POSTs sent to the HTTP server. This secret can be used to verify that the request was sent by Meraki.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'httpServers'],
'operation': 'createNetworkHttpServer',
}
resource = f'/networks/{networkId}/httpServers'
body_params = ['name', 'url', 'sharedSecret']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def createNetworkHttpServersWebhookTest(self, networkId: str, url: str):
"""
**Send a test webhook for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!create-network-http-servers-webhook-test
- networkId (string)
- url (string): The URL where the test webhook will be sent
"""
kwargs = locals()
metadata = {
'tags': ['networks', 'configure', 'httpServers', 'webhookTests'],
'operation': 'createNetworkHttpServersWebhookTest',
}
resource = f'/networks/{networkId}/httpServers/webhookTests'
body_params = ['url']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def getNetworkHttpServersWebhookTest(self, networkId: str, id: str):
"""
**Return the status of a webhook test for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-http-servers-webhook-test
- networkId (string)
- id (string)
"""
metadata = {
'tags': ['networks', 'configure', 'httpServers', 'webhookTests'],
'operation': 'getNetworkHttpServersWebhookTest',
}
resource = f'/networks/{networkId}/httpServers/webhookTests/{id}'
return self._session.get(metadata, resource)
def getNetworkHttpServer(self, networkId: str, id: str):
"""
**Return an HTTP server for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-http-server
- networkId (string)
- id (string)
"""
metadata = {
'tags': ['networks', 'configure', 'httpServers'],
'operation': 'getNetworkHttpServer',
}
resource = f'/networks/{networkId}/httpServers/{id}'
return self._session.get(metadata, resource)
def updateNetworkHttpServer(self, networkId: str, id: str, **kwargs):
"""
**Update an HTTP server**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-http-server
- networkId (string)
- id (string)
- name (string): A name for easy reference to the HTTP server
- url (string): The URL of the HTTP server
- sharedSecret (string): A shared secret that will be included in POSTs sent to the HTTP server. This secret can be used to verify that the request was sent by Meraki.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'httpServers'],
'operation': 'updateNetworkHttpServer',
}
resource = f'/networks/{networkId}/httpServers/{id}'
body_params = ['name', 'url', 'sharedSecret']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def deleteNetworkHttpServer(self, networkId: str, id: str):
"""
**Delete an HTTP server from a network**
https://developer.cisco.com/docs/meraki-api-v1/#!delete-network-http-server
- networkId (string)
- id (string)
"""
metadata = {
'tags': ['networks', 'configure', 'httpServers'],
'operation': 'deleteNetworkHttpServer',
}
resource = f'/networks/{networkId}/httpServers/{id}'
return self._session.delete(metadata, resource)
def getNetworkMerakiAuthUsers(self, networkId: str):
"""
**List the splash or RADIUS users configured under Meraki Authentication for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-meraki-auth-users
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'merakiAuthUsers'],
'operation': 'getNetworkMerakiAuthUsers',
}
resource = f'/networks/{networkId}/merakiAuthUsers'
return self._session.get(metadata, resource)
def getNetworkMerakiAuthUser(self, networkId: str, merakiAuthUserId: str):
"""
**Return the Meraki Auth splash or RADIUS user**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-meraki-auth-user
- networkId (string)
- merakiAuthUserId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'merakiAuthUsers'],
'operation': 'getNetworkMerakiAuthUser',
}
resource = f'/networks/{networkId}/merakiAuthUsers/{merakiAuthUserId}'
return self._session.get(metadata, resource)
def getNetworkNetflowSettings(self, networkId: str):
"""
**Return the NetFlow traffic reporting settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-netflow-settings
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'netflowSettings'],
'operation': 'getNetworkNetflowSettings',
}
resource = f'/networks/{networkId}/netflowSettings'
return self._session.get(metadata, resource)
def updateNetworkNetflowSettings(self, networkId: str, **kwargs):
"""
**Update the NetFlow traffic reporting settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-netflow-settings
- networkId (string)
- reportingEnabled (boolean): Boolean indicating whether NetFlow traffic reporting is enabled (true) or disabled (false).
- collectorIp (string): The IPv4 address of the NetFlow collector.
- collectorPort (integer): The port that the NetFlow collector will be listening on.
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'netflowSettings'],
'operation': 'updateNetworkNetflowSettings',
}
resource = f'/networks/{networkId}/netflowSettings'
body_params = ['reportingEnabled', 'collectorIp', 'collectorPort']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkPiiPiiKeys(self, networkId: str, **kwargs):
"""
**List the keys required to access Personally Identifiable Information (PII) for a given identifier. Exactly one identifier will be accepted. If the organization contains org-wide Systems Manager users matching the key provided then there will be an entry with the key "0" containing the applicable keys.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-pii-pii-keys
- networkId (string)
- username (string): The username of a Systems Manager user
- email (string): The email of a network user account or a Systems Manager device
- mac (string): The MAC of a network client device or a Systems Manager device
- serial (string): The serial of a Systems Manager device
- imei (string): The IMEI of a Systems Manager device
- bluetoothMac (string): The MAC of a Bluetooth client
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'pii', 'piiKeys'],
'operation': 'getNetworkPiiPiiKeys',
}
resource = f'/networks/{networkId}/pii/piiKeys'
query_params = ['username', 'email', 'mac', 'serial', 'imei', 'bluetoothMac']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get(metadata, resource, params)
def getNetworkPiiRequests(self, networkId: str):
"""
**List the PII requests for this network or organization**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-pii-requests
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'pii', 'requests'],
'operation': 'getNetworkPiiRequests',
}
resource = f'/networks/{networkId}/pii/requests'
return self._session.get(metadata, resource)
def createNetworkPiiRequest(self, networkId: str, **kwargs):
"""
**Submit a new delete or restrict processing PII request**
https://developer.cisco.com/docs/meraki-api-v1/#!create-network-pii-request
- networkId (string)
- type (string): One of "delete" or "restrict processing"
- datasets (array): The datasets related to the provided key that should be deleted. Only applies to "delete" requests. The value "all" will be expanded to all datasets applicable to this type. The datasets by applicable to each type are: mac (usage, events, traffic), email (users, loginAttempts), username (users, loginAttempts), bluetoothMac (client, connectivity), smDeviceId (device), smUserId (user)
- username (string): The username of a network log in. Only applies to "delete" requests.
- email (string): The email of a network user account. Only applies to "delete" requests.
- mac (string): The MAC of a network client device. Applies to both "restrict processing" and "delete" requests.
- smDeviceId (string): The sm_device_id of a Systems Manager device. The only way to "restrict processing" or "delete" a Systems Manager device. Must include "device" in the dataset for a "delete" request to destroy the device.
- smUserId (string): The sm_user_id of a Systems Manager user. The only way to "restrict processing" or "delete" a Systems Manager user. Must include "user" in the dataset for a "delete" request to destroy the user.
"""
kwargs.update(locals())
if 'type' in kwargs:
options = ['delete', 'restrict processing']
assert kwargs['type'] in options, f'''"type" cannot be "{kwargs['type']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'configure', 'pii', 'requests'],
'operation': 'createNetworkPiiRequest',
}
resource = f'/networks/{networkId}/pii/requests'
body_params = ['type', 'datasets', 'username', 'email', 'mac', 'smDeviceId', 'smUserId']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.post(metadata, resource, payload)
def getNetworkPiiRequest(self, networkId: str, requestId: str):
"""
**Return a PII request**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-pii-request
- networkId (string)
- requestId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'pii', 'requests'],
'operation': 'getNetworkPiiRequest',
}
resource = f'/networks/{networkId}/pii/requests/{requestId}'
return self._session.get(metadata, resource)
def deleteNetworkPiiRequest(self, networkId: str, requestId: str):
"""
**Delete a restrict processing PII request**
https://developer.cisco.com/docs/meraki-api-v1/#!delete-network-pii-request
- networkId (string)
- requestId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'pii', 'requests'],
'operation': 'deleteNetworkPiiRequest',
}
resource = f'/networks/{networkId}/pii/requests/{requestId}'
return self._session.delete(metadata, resource)
def getNetworkPiiSmDevicesForKey(self, networkId: str, **kwargs):
"""
**Given a piece of Personally Identifiable Information (PII), return the Systems Manager device ID(s) associated with that identifier. These device IDs can be used with the Systems Manager API endpoints to retrieve device details. Exactly one identifier will be accepted.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-pii-sm-devices-for-key
- networkId (string)
- username (string): The username of a Systems Manager user
- email (string): The email of a network user account or a Systems Manager device
- mac (string): The MAC of a network client device or a Systems Manager device
- serial (string): The serial of a Systems Manager device
- imei (string): The IMEI of a Systems Manager device
- bluetoothMac (string): The MAC of a Bluetooth client
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'pii', 'smDevicesForKey'],
'operation': 'getNetworkPiiSmDevicesForKey',
}
resource = f'/networks/{networkId}/pii/smDevicesForKey'
query_params = ['username', 'email', 'mac', 'serial', 'imei', 'bluetoothMac']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get(metadata, resource, params)
def getNetworkPiiSmOwnersForKey(self, networkId: str, **kwargs):
"""
**Given a piece of Personally Identifiable Information (PII), return the Systems Manager owner ID(s) associated with that identifier. These owner IDs can be used with the Systems Manager API endpoints to retrieve owner details. Exactly one identifier will be accepted.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-pii-sm-owners-for-key
- networkId (string)
- username (string): The username of a Systems Manager user
- email (string): The email of a network user account or a Systems Manager device
- mac (string): The MAC of a network client device or a Systems Manager device
- serial (string): The serial of a Systems Manager device
- imei (string): The IMEI of a Systems Manager device
- bluetoothMac (string): The MAC of a Bluetooth client
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'pii', 'smOwnersForKey'],
'operation': 'getNetworkPiiSmOwnersForKey',
}
resource = f'/networks/{networkId}/pii/smOwnersForKey'
query_params = ['username', 'email', 'mac', 'serial', 'imei', 'bluetoothMac']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get(metadata, resource, params)
def getNetworkSettings(self, networkId: str):
"""
**Return the settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-settings
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'settings'],
'operation': 'getNetworkSettings',
}
resource = f'/networks/{networkId}/settings'
return self._session.get(metadata, resource)
def updateNetworkSettings(self, networkId: str, **kwargs):
"""
**Update the settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-settings
- networkId (string)
- localStatusPageEnabled (boolean): Enables / disables the local device status pages (<a target='_blank' href='http://my.meraki.com/'>my.meraki.com, </a><a target='_blank' href='http://ap.meraki.com/'>ap.meraki.com, </a><a target='_blank' href='http://switch.meraki.com/'>switch.meraki.com, </a><a target='_blank' href='http://wired.meraki.com/'>wired.meraki.com</a>). Optional (defaults to false)
- remoteStatusPageEnabled (boolean): Enables / disables access to the device status page (<a target='_blank'>http://[device's LAN IP])</a>. Optional. Can only be set if localStatusPageEnabled is set to true
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'settings'],
'operation': 'updateNetworkSettings',
}
resource = f'/networks/{networkId}/settings'
body_params = ['localStatusPageEnabled', 'remoteStatusPageEnabled']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkSnmpSettings(self, networkId: str):
"""
**Return the SNMP settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-snmp-settings
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'snmpSettings'],
'operation': 'getNetworkSnmpSettings',
}
resource = f'/networks/{networkId}/snmpSettings'
return self._session.get(metadata, resource)
def updateNetworkSnmpSettings(self, networkId: str, **kwargs):
"""
**Update the SNMP settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-snmp-settings
- networkId (string)
- access (string): The type of SNMP access. Can be one of 'none' (disabled), 'community' (V1/V2c), or 'users' (V3).
- communityString (string): The SNMP community string. Only relevant if 'access' is set to 'community'.
- users (array): The list of SNMP users. Only relevant if 'access' is set to 'users'.
"""
kwargs.update(locals())
if 'access' in kwargs:
options = ['none', 'community', 'users']
assert kwargs['access'] in options, f'''"access" cannot be "{kwargs['access']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'configure', 'snmpSettings'],
'operation': 'updateNetworkSnmpSettings',
}
resource = f'/networks/{networkId}/snmpSettings'
body_params = ['access', 'communityString', 'users']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkSplashLoginAttempts(self, networkId: str, **kwargs):
"""
**List the splash login attempts for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-splash-login-attempts
- networkId (string)
- ssidNumber (integer): Only return the login attempts for the specified SSID
- loginIdentifier (string): The username, email, or phone number used during login
- timespan (integer): The timespan, in seconds, for the login attempts. The period will be from [timespan] seconds ago until now. The maximum timespan is 3 months
"""
kwargs.update(locals())
if 'ssidNumber' in kwargs:
options = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]
assert kwargs['ssidNumber'] in options, f'''"ssidNumber" cannot be "{kwargs['ssidNumber']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'monitor', 'splashLoginAttempts'],
'operation': 'getNetworkSplashLoginAttempts',
}
resource = f'/networks/{networkId}/splashLoginAttempts'
query_params = ['ssidNumber', 'loginIdentifier', 'timespan']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get(metadata, resource, params)
def splitNetwork(self, networkId: str):
"""
**Split a combined network into individual networks for each type of device**
https://developer.cisco.com/docs/meraki-api-v1/#!split-network
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure'],
'operation': 'splitNetwork',
}
resource = f'/networks/{networkId}/split'
return self._session.post(metadata, resource)
def swapNetworkWarmspare(self, networkId: str):
"""
**Swap MX primary and warm spare appliances**
https://developer.cisco.com/docs/meraki-api-v1/#!swap-network-warmspare
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure'],
'operation': 'swapNetworkWarmspare',
}
resource = f'/networks/{networkId}/swapWarmSpare'
return self._session.post(metadata, resource)
def getNetworkSyslogServers(self, networkId: str):
"""
**List the syslog servers for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-syslog-servers
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'syslogServers'],
'operation': 'getNetworkSyslogServers',
}
resource = f'/networks/{networkId}/syslogServers'
return self._session.get(metadata, resource)
def updateNetworkSyslogServers(self, networkId: str, servers: list):
"""
**Update the syslog servers for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-syslog-servers
- networkId (string)
- servers (array): A list of the syslog servers for this network
"""
kwargs = locals()
metadata = {
'tags': ['networks', 'configure', 'syslogServers'],
'operation': 'updateNetworkSyslogServers',
}
resource = f'/networks/{networkId}/syslogServers'
body_params = ['servers']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkTraffic(self, networkId: str, **kwargs):
"""
** The traffic analysis data for this network.
<a href="https://documentation.meraki.com/MR/Monitoring_and_Reporting/Hostname_Visibility">Traffic Analysis with Hostname Visibility</a> must be enabled on the network.
**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-traffic
- networkId (string)
- t0 (string): The beginning of the timespan for the data. The maximum lookback period is 30 days from today.
- timespan (number): The timespan for which the information will be fetched. If specifying timespan, do not specify parameter t0. The value must be in seconds and be less than or equal to 30 days.
- deviceType (string): Filter the data by device type: 'combined', 'wireless', 'switch' or 'appliance'. Defaults to 'combined'.
When using 'combined', for each rule the data will come from the device type with the most usage.
"""
kwargs.update(locals())
if 'deviceType' in kwargs:
options = ['combined', 'wireless', 'switch', 'appliance']
assert kwargs['deviceType'] in options, f'''"deviceType" cannot be "{kwargs['deviceType']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'monitor', 'traffic'],
'operation': 'getNetworkTraffic',
}
resource = f'/networks/{networkId}/traffic'
query_params = ['t0', 'timespan', 'deviceType']
params = {k: v for (k, v) in kwargs.items() if k in query_params}
return self._session.get(metadata, resource, params)
def getNetworkTrafficAnalysisSettings(self, networkId: str):
"""
**Return the traffic analysis settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-traffic-analysis-settings
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'trafficAnalysisSettings'],
'operation': 'getNetworkTrafficAnalysisSettings',
}
resource = f'/networks/{networkId}/trafficAnalysisSettings'
return self._session.get(metadata, resource)
def updateNetworkTrafficAnalysisSettings(self, networkId: str, **kwargs):
"""
**Update the traffic analysis settings for a network**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-traffic-analysis-settings
- networkId (string)
- mode (string): The traffic analysis mode for the network. Can be one of 'disabled' (do not collect traffic types),
'basic' (collect generic traffic categories), or 'detailed' (collect destination hostnames).
- customPieChartItems (array): The list of items that make up the custom pie chart for traffic reporting.
"""
kwargs.update(locals())
if 'mode' in kwargs:
options = ['disabled', 'basic', 'detailed']
assert kwargs['mode'] in options, f'''"mode" cannot be "{kwargs['mode']}", & must be set to one of: {options}'''
metadata = {
'tags': ['networks', 'configure', 'trafficAnalysisSettings'],
'operation': 'updateNetworkTrafficAnalysisSettings',
}
resource = f'/networks/{networkId}/trafficAnalysisSettings'
body_params = ['mode', 'customPieChartItems']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
def getNetworkTrafficShapingApplicationCategories(self, networkId: str):
"""
**Returns the application categories for traffic shaping rules.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-traffic-shaping-application-categories
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'trafficShaping', 'applicationCategories'],
'operation': 'getNetworkTrafficShapingApplicationCategories',
}
resource = f'/networks/{networkId}/trafficShaping/applicationCategories'
return self._session.get(metadata, resource)
def getNetworkTrafficShapingDscpTaggingOptions(self, networkId: str):
"""
**Returns the available DSCP tagging options for your traffic shaping rules.**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-traffic-shaping-dscp-tagging-options
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'trafficShaping', 'dscpTaggingOptions'],
'operation': 'getNetworkTrafficShapingDscpTaggingOptions',
}
resource = f'/networks/{networkId}/trafficShaping/dscpTaggingOptions'
return self._session.get(metadata, resource)
def unbindNetwork(self, networkId: str):
"""
**Unbind a network from a template.**
https://developer.cisco.com/docs/meraki-api-v1/#!unbind-network
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure'],
'operation': 'unbindNetwork',
}
resource = f'/networks/{networkId}/unbind'
return self._session.post(metadata, resource)
def getNetworkWarmSpareSettings(self, networkId: str):
"""
**Return MX warm spare settings**
https://developer.cisco.com/docs/meraki-api-v1/#!get-network-warm-spare-settings
- networkId (string)
"""
metadata = {
'tags': ['networks', 'configure', 'warmSpareSettings'],
'operation': 'getNetworkWarmSpareSettings',
}
resource = f'/networks/{networkId}/warmSpareSettings'
return self._session.get(metadata, resource)
def updateNetworkWarmSpareSettings(self, networkId: str, enabled: bool, **kwargs):
"""
**Update MX warm spare settings**
https://developer.cisco.com/docs/meraki-api-v1/#!update-network-warm-spare-settings
- networkId (string)
- enabled (boolean): Enable warm spare
- spareSerial (string): Serial number of the warm spare appliance
- uplinkMode (string): Uplink mode, either virtual or public
- virtualIp1 (string): The WAN 1 shared IP
- virtualIp2 (string): The WAN 2 shared IP
"""
kwargs.update(locals())
metadata = {
'tags': ['networks', 'configure', 'warmSpareSettings'],
'operation': 'updateNetworkWarmSpareSettings',
}
resource = f'/networks/{networkId}/warmSpareSettings'
body_params = ['enabled', 'spareSerial', 'uplinkMode', 'virtualIp1', 'virtualIp2']
payload = {k: v for (k, v) in kwargs.items() if k in body_params}
return self._session.put(metadata, resource, payload)
| 46.776351 | 746 | 0.639674 |
5428c5d36d16dcb676f00d2321bb87f48c3fd307 | 8,500 | py | Python | tests/conftest.py | mabudz/pyam | ac64c7194c36b796ae473cae47c2917d3848e5bb | [
"Apache-2.0"
] | 1 | 2018-08-16T11:59:42.000Z | 2018-08-16T11:59:42.000Z | tests/conftest.py | shaohuizhang/pyam | 2dffec07ce86b2f3fb8133ac369fa3c172589064 | [
"Apache-2.0"
] | null | null | null | tests/conftest.py | shaohuizhang/pyam | 2dffec07ce86b2f3fb8133ac369fa3c172589064 | [
"Apache-2.0"
] | 1 | 2022-03-11T12:19:51.000Z | 2022-03-11T12:19:51.000Z | # has to go first for environment setup reasons
import matplotlib
matplotlib.use("agg")
from pathlib import Path
import os
from requests.exceptions import ConnectionError
import pytest
import numpy as np
import pandas as pd
from datetime import datetime
from pyam import IamDataFrame, META_IDX, IAMC_IDX, iiasa
# verify whether IIASA database API can be reached, skip tests otherwise
try:
iiasa.Connection()
IIASA_UNAVAILABLE = False
except ConnectionError: # pragma: no cover
IIASA_UNAVAILABLE = True
TEST_API = "integration-test"
TEST_API_NAME = "IXSE_INTEGRATION_TEST"
here = Path(__file__).parent
IMAGE_BASELINE_DIR = here / "expected_figs"
TEST_DATA_DIR = here / "data"
TEST_YEARS = [2005, 2010]
TEST_DTS = [datetime(2005, 6, 17), datetime(2010, 7, 21)]
TEST_TIME_STR = ["2005-06-17", "2010-07-21"]
TEST_TIME_STR_HR = ["2005-06-17 00:00:00", "2010-07-21 12:00:00"]
TEST_TIME_MIXED = [2005, datetime(2010, 7, 21)]
DTS_MAPPING = {2005: TEST_DTS[0], 2010: TEST_DTS[1]}
EXP_DATETIME_INDEX = pd.DatetimeIndex(["2005-06-17T00:00:00"])
TEST_DF = pd.DataFrame(
[
["model_a", "scen_a", "World", "Primary Energy", "EJ/yr", 1, 6.0],
["model_a", "scen_a", "World", "Primary Energy|Coal", "EJ/yr", 0.5, 3],
["model_a", "scen_b", "World", "Primary Energy", "EJ/yr", 2, 7],
],
columns=IAMC_IDX + TEST_YEARS,
)
META_COLS = ["number", "string"]
META_DF = pd.DataFrame(
[
["model_a", "scen_a", 1, "foo"],
["model_a", "scen_b", 2, np.nan],
],
columns=META_IDX + META_COLS,
).set_index(META_IDX)
FULL_FEATURE_DF = pd.DataFrame(
[
["World", "Primary Energy", "EJ/yr", 12, 15],
["reg_a", "Primary Energy", "EJ/yr", 8, 9],
["reg_b", "Primary Energy", "EJ/yr", 4, 6],
["World", "Primary Energy|Coal", "EJ/yr", 9, 10],
["reg_a", "Primary Energy|Coal", "EJ/yr", 6, 6],
["reg_b", "Primary Energy|Coal", "EJ/yr", 3, 4],
["World", "Primary Energy|Wind", "EJ/yr", 3, 5],
["reg_a", "Primary Energy|Wind", "EJ/yr", 2, 3],
["reg_b", "Primary Energy|Wind", "EJ/yr", 1, 2],
["World", "Emissions|CO2", "EJ/yr", 10, 14],
["World", "Emissions|CO2|Energy", "EJ/yr", 6, 8],
["World", "Emissions|CO2|AFOLU", "EJ/yr", 3, 4],
["World", "Emissions|CO2|Bunkers", "EJ/yr", 1, 2],
["reg_a", "Emissions|CO2", "EJ/yr", 6, 8],
["reg_a", "Emissions|CO2|Energy", "EJ/yr", 4, 5],
["reg_a", "Emissions|CO2|AFOLU", "EJ/yr", 2, 3],
["reg_b", "Emissions|CO2", "EJ/yr", 3, 4],
["reg_b", "Emissions|CO2|Energy", "EJ/yr", 2, 3],
["reg_b", "Emissions|CO2|AFOLU", "EJ/yr", 1, 1],
["World", "Price|Carbon", "USD/tCO2", 4, 27],
["reg_a", "Price|Carbon", "USD/tCO2", 1, 30],
["reg_b", "Price|Carbon", "USD/tCO2", 10, 21],
["World", "Population", "m", 3, 5],
["reg_a", "Population", "m", 2, 3],
["reg_b", "Population", "m", 1, 2],
],
columns=["region", "variable", "unit"] + TEST_YEARS,
)
img = ["IMAGE", "a_scenario"]
msg = ["MESSAGE-GLOBIOM", "a_scenario"]
REG_DF = pd.DataFrame(
[
img + ["NAF", "Primary Energy", "EJ/yr", 1, 6],
img + ["ME", "Primary Energy", "EJ/yr", 2, 7],
img + ["World", "Primary Energy", "EJ/yr", 3, 13],
msg + ["MEA", "Primary Energy", "EJ/yr", 1, 6],
msg + ["AFR", "Primary Energy", "EJ/yr", 2, 7],
msg + ["World", "Primary Energy", "EJ/yr", 3, 13],
],
columns=IAMC_IDX + TEST_YEARS,
)
RECURSIVE_DF = pd.DataFrame(
[
["Secondary Energy|Electricity", "EJ/yr", 5, 19.0],
["Secondary Energy|Electricity|Wind", "EJ/yr", 5, 17],
["Secondary Energy|Electricity|Wind|Offshore", "EJ/yr", 1, 5],
["Secondary Energy|Electricity|Wind|Onshore", "EJ/yr", 4, 12],
["Secondary Energy|Electricity|Solar", "EJ/yr", np.nan, 2],
],
columns=["variable", "unit"] + TEST_YEARS,
)
TEST_STACKPLOT_DF = pd.DataFrame(
[
["World", "Emissions|CO2|Energy|Oil", "Mt CO2/yr", 2, 3.2, 2.0, 1.8],
["World", "Emissions|CO2|Energy|Gas", "Mt CO2/yr", 1.3, 1.6, 1.0, 0.7],
["World", "Emissions|CO2|Energy|BECCS", "Mt CO2/yr", 0.0, 0.4, -0.4, 0.3],
["World", "Emissions|CO2|Cars", "Mt CO2/yr", 1.6, 3.8, 3.0, 2.5],
["World", "Emissions|CO2|Tar", "Mt CO2/yr", 0.3, 0.35, 0.35, 0.33],
["World", "Emissions|CO2|Agg", "Mt CO2/yr", 0.5, -0.1, -0.5, -0.7],
["World", "Emissions|CO2|LUC", "Mt CO2/yr", -0.3, -0.6, -1.2, -1.0],
],
columns=["region", "variable", "unit", 2005, 2010, 2015, 2020],
)
# appease stickler
TEST_STACKPLOT_DF["model"] = "IMG"
TEST_STACKPLOT_DF["scenario"] = "a_scen"
# minimal IamDataFrame with four different time formats
@pytest.fixture(
scope="function",
params=[
# standard IAMC format
{},
# testing several versions of datetime format
dict([(i, j) for i, j in zip(TEST_YEARS, TEST_DTS)]),
dict([(i, j) for i, j in zip(TEST_YEARS, TEST_TIME_STR)]),
dict([(i, j) for i, j in zip(TEST_YEARS, TEST_TIME_STR_HR)]),
],
)
def test_df(request):
df = IamDataFrame(data=TEST_DF.rename(request.param, axis="columns"))
for i in META_COLS:
df.set_meta(META_DF[i])
yield df
# minimal IamDataFrame for specifically testing 'year'-column features
@pytest.fixture(scope="function")
def test_df_year():
df = IamDataFrame(data=TEST_DF)
for i in META_COLS:
df.set_meta(META_DF[i])
yield df
# minimal IamDataFrame for specifically testing 'time'-column features
@pytest.fixture(scope="function")
def test_df_time():
df = IamDataFrame(
data=TEST_DF.rename({2005: TEST_DTS[0], 2010: TEST_DTS[1]}, axis="columns")
)
for i in META_COLS:
df.set_meta(META_DF[i])
yield df
# minimal IamDataFrame for specifically testing 'mixed'-time-domain features
@pytest.fixture(scope="function")
def test_df_mixed():
mapping = dict([(i, j) for i, j in zip(TEST_YEARS, TEST_TIME_MIXED)])
df = IamDataFrame(data=TEST_DF.rename(mapping, axis="columns"))
for i in META_COLS:
df.set_meta(META_DF[i])
yield df
# minimal test data as pandas.DataFrame (only 'year' time format)
@pytest.fixture(scope="function")
def test_pd_df():
yield TEST_DF.copy()
# minimal test data as pandas.DataFrame with index (only 'year' time format)
@pytest.fixture(scope="function")
def test_df_index():
yield TEST_DF.set_index(IAMC_IDX)
# IamDataFrame with variable-and-region-structure for testing aggregation tools
@pytest.fixture(scope="function", params=[None, "datetime"])
def simple_df(request):
    """Full-featured IamDataFrame (region + variable hierarchy), year or datetime."""
    _df = FULL_FEATURE_DF.copy()
    if request.param == "datetime":
        _df.rename(DTS_MAPPING, axis="columns", inplace=True)
    df = IamDataFrame(model="model_a", scenario="scen_a", data=_df)
    df.set_meta("foo", "string")
    yield df
# IamDataFrame with subannual time resolution
@pytest.fixture(scope="function")
def subannual_df():
    """IamDataFrame with 'year', 'winter' and 'summer' subannual timeslices."""
    _df = FULL_FEATURE_DF.iloc[0:6].copy()
    def add_subannual(_data, name, value):
        # tag a copy of the data with a subannual label and scale its values
        _data["subannual"] = name
        _data[TEST_YEARS] = _data[TEST_YEARS] * value
        return _data
    # primary energy is a direct sum across sub-annual timeslices
    mapping = [("year", 1), ("winter", 0.7), ("summer", 0.3)]
    lst = [add_subannual(_df.copy(), name, value) for name, value in mapping]
    df = IamDataFrame(model="model_a", scenario="scen_a", data=pd.concat(lst))
    df.set_meta("foo", "string")
    yield df
@pytest.fixture(scope="function")
def reg_df():
    """IamDataFrame with two models and regional structure (from REG_DF)."""
    df = IamDataFrame(data=REG_DF)
    yield df
@pytest.fixture(scope="session")
def plot_df():
    """Session-scoped IamDataFrame loaded from the plotting test data file."""
    df = IamDataFrame(data=os.path.join(TEST_DATA_DIR, "plot_data.csv"))
    yield df
# IamDataFrame with two scenarios and structure for recursive aggregation
@pytest.fixture(scope="function", params=["year", "datetime"])
def recursive_df(request):
    """IamDataFrame with two scenarios for recursive-aggregation tests."""
    data = (
        RECURSIVE_DF
        if request.param == "year"
        else RECURSIVE_DF.rename(DTS_MAPPING, axis="columns")
    )
    df = IamDataFrame(data, model="model_a", scenario="scen_a", region="World")
    # second scenario is an exact doubling of the first
    df2 = df.rename(scenario={"scen_a": "scen_b"})
    df2._data *= 2
    df.append(df2, inplace=True)
    yield df
@pytest.fixture(scope="session")
def plot_stackplot_df():
    """Session-scoped IamDataFrame for stackplot tests (TEST_STACKPLOT_DF)."""
    df = IamDataFrame(TEST_STACKPLOT_DF)
    yield df
@pytest.fixture(scope="session")
def conn():
    """Connection to the IIASA test API (implicitly None when unreachable)."""
    if not IIASA_UNAVAILABLE:
        return iiasa.Connection(TEST_API)
| 31.365314 | 83 | 0.618353 |
2e2b5df8db24a52abdcad1aac6b76d938a854d70 | 3,284 | py | Python | qiskit/qasm/node/id.py | romainfd/qiskit-terra | b5285ccc5cb1d17b7c73402833f2750b93652426 | [
"Apache-2.0"
] | 2 | 2020-12-26T21:12:30.000Z | 2021-05-18T12:53:42.000Z | qiskit/qasm/node/id.py | romainfd/qiskit-terra | b5285ccc5cb1d17b7c73402833f2750b93652426 | [
"Apache-2.0"
] | null | null | null | qiskit/qasm/node/id.py | romainfd/qiskit-terra | b5285ccc5cb1d17b7c73402833f2750b93652426 | [
"Apache-2.0"
] | 1 | 2020-07-13T17:56:46.000Z | 2020-07-13T17:56:46.000Z | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Node for an OPENQASM id."""
import warnings
from .node import Node
from .nodeexception import NodeException
class Id(Node):
    """Node for an OPENQASM id.

    The node has no children but has fields name, line, and file.
    The flag ``is_bit`` helps with scoping rules; it is set externally
    when the id appears inside a gate declaration.
    """

    def __init__(self, id, line, file):
        """Create the id node.

        - id: the identifier string (shadows the builtin; kept for API compat)
        - line: source line number of the identifier
        - file: source file name of the identifier
        """
        # pylint: disable=redefined-builtin
        super().__init__("id", None, None)
        self.name = id
        self.line = line
        self.file = file
        # To help with scoping rules, so we know the id is a bit,
        # this flag is set to True when the id appears in a gate declaration
        self.is_bit = False

    def to_string(self, indent):
        """Print the node with indent."""
        ind = indent * ' '
        print(ind, 'id', self.name)

    def qasm(self, prec=None):
        """Return the corresponding OPENQASM string."""
        if prec is not None:
            warnings.warn('Parameter \'Id.qasm(..., prec)\' is no longer used and is being '
                          'deprecated.', DeprecationWarning, 2)
        return self.name

    def latex(self, prec=None, nested_scope=None):
        """Return the corresponding math mode latex string."""
        if prec is not None:
            warnings.warn('Parameter \'Id.latex(..., prec)\' is no longer used and is being '
                          'deprecated.', DeprecationWarning, 2)
        if not nested_scope:
            # BUG FIX: the original returned "\textrm{...}" in a plain string,
            # where "\t" is the TAB escape -- producing a literal tab followed
            # by "extrm{...}" instead of the LaTeX command. A raw string keeps
            # the backslash intact.
            return r"\textrm{" + self.name + "}"
        else:
            if self.name not in nested_scope[-1]:
                raise NodeException("Expected local parameter name: ",
                                    "name=%s, " % self.name,
                                    "line=%s, " % self.line,
                                    "file=%s" % self.file)
            return nested_scope[-1][self.name].latex(nested_scope[0:-1])

    def sym(self, nested_scope=None):
        """Return the corresponding symbolic number (looked up in the scope)."""
        if not nested_scope or self.name not in nested_scope[-1]:
            raise NodeException("Expected local parameter name: ",
                                "name=%s, line=%s, file=%s" % (
                                    self.name, self.line, self.file))
        return nested_scope[-1][self.name].sym(nested_scope[0:-1])

    def real(self, nested_scope=None):
        """Return the corresponding floating point number (looked up in the scope)."""
        if not nested_scope or self.name not in nested_scope[-1]:
            raise NodeException("Expected local parameter name: ",
                                "name=%s, line=%s, file=%s" % (
                                    self.name, self.line, self.file))
        return nested_scope[-1][self.name].real(nested_scope[0:-1])
| 38.635294 | 93 | 0.579781 |
816ffdc9a69bcc89914e4d93307131bea7fe088a | 2,782 | py | Python | libs/mpl/preprocessed/preprocess.py | Abce/boost | 2d7491a27211aa5defab113f8e2d657c3d85ca93 | [
"BSL-1.0"
] | 85 | 2015-02-08T20:36:17.000Z | 2021-11-14T20:38:31.000Z | libs/boost/libs/mpl/preprocessed/preprocess.py | flingone/frameworks_base_cmds_remoted | 4509d9f0468137ed7fd8d100179160d167e7d943 | [
"Apache-2.0"
] | 9 | 2015-01-28T16:33:19.000Z | 2020-04-12T23:03:28.000Z | libs/boost/libs/mpl/preprocessed/preprocess.py | flingone/frameworks_base_cmds_remoted | 4509d9f0468137ed7fd8d100179160d167e7d943 | [
"Apache-2.0"
] | 27 | 2015-01-28T16:33:30.000Z | 2021-08-12T05:04:39.000Z |
# Copyright Aleksey Gurtovoy 2001-2004
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
# See http://www.boost.org/libs/mpl for documentation.
# $Id$
# $Date$
# $Revision$
import pp
import shutil
import os.path
import os
import string
import sys
# Read the external preprocessor command template (first line of
# "preprocess.cmd"). Use a context manager so the file handle is closed
# promptly instead of being leaked until garbage collection.
with open( "preprocess.cmd" ) as _cmd_file:
    preprocess_cmd = _cmd_file.readlines()[0]
def process( file, boost_root, dst_dir, mode ):
    # Preprocess one .cpp source into its .hpp header and install it under
    # <dst_dir>/<mode>/.
    #
    # - file: path of the .cpp file driving the preprocessing
    # - boost_root: Boost source tree root, substituted into the command
    #   template read from "preprocess.cmd"
    # - dst_dir: base destination directory for preprocessed headers
    # - mode: toolchain/configuration name; also the destination subdirectory
    file_path = "%s.hpp" % os.path.splitext( file )[0]
    # run the external preprocessor command (template from preprocess.cmd)
    os.system( preprocess_cmd % {
          'boost_root': boost_root
        , 'mode': mode
        , 'file': file
        , 'file_path': file_path
        } )
    # post-process the generated header in place via the `pp` helper module
    os.rename( file_path, "%s.tmp" % file_path )
    pp.main( "%s.tmp" % file_path, file_path )
    os.remove( "%s.tmp" % file_path )
    filename = os.path.basename(file_path)
    dst_dir = os.path.join( dst_dir, mode )
    dst_file = os.path.join( dst_dir, filename )
    # preserve the permission bits of an already-installed header
    if os.path.exists( dst_file ):
        shutil.copymode( filename, dst_file )
    shutil.copy( filename, dst_dir )
    os.remove( filename )
def process_all( root, boost_root, dst_dir, mode ):
    # Recursively walk `root`, preprocessing every .cpp source found.
    for entry in os.listdir( root ):
        entry_path = os.path.join( root, entry )
        if os.path.splitext( entry )[1] == ".cpp":
            process( entry_path, boost_root, dst_dir, mode )
        elif os.path.isdir( entry_path ):
            process_all( entry_path, boost_root, dst_dir, mode )
def main( all_modes, src_dir, dst_dir ):
    # Command-line driver (note: Python 2 syntax -- `print` statements).
    #
    # sys.argv[1]: preprocessing mode, or "all" to run every mode in all_modes
    # sys.argv[2]: path to the Boost source tree root
    # sys.argv[3] (optional): a single source file to process; otherwise the
    #     whole `src_dir` tree is processed recursively
    if len( sys.argv ) < 2:
        print "\nUsage:\n\t %s <mode> <boost_root> [<source_file>]" % os.path.basename( sys.argv[0] )
        print "\nPurpose:\n\t updates preprocessed version(s) of the header(s) in \"%s\" directory" % dst_dir
        print "\nExample:\n\t the following command will re-generate and update all 'apply.hpp' headers:"
        print "\n\t\t %s all f:\\cvs\\boost apply.cpp" % os.path.basename( sys.argv[0] )
        sys.exit( -1 )
    if sys.argv[1] == "all":
        modes = all_modes
    else:
        modes = [sys.argv[1]]
    boost_root = sys.argv[2]
    dst_dir = os.path.join( boost_root, dst_dir )
    for mode in modes:
        if len( sys.argv ) > 3:
            # single-file mode: process just the named source under src_dir
            file = os.path.join( os.path.join( os.getcwd(), src_dir ), sys.argv[3] )
            process( file, boost_root, dst_dir, mode )
        else:
            process_all( os.path.join( os.getcwd(), src_dir ), boost_root, dst_dir, mode )
if __name__ == '__main__':
    # Regenerate preprocessed headers for all supported toolchain modes,
    # reading sources from ./src and installing under
    # boost/mpl/aux_/preprocessed/<mode>/.
    main(
          ["bcc", "bcc551", "gcc", "msvc60", "msvc70", "mwcw", "dmc", "no_ctps", "no_ttp", "plain"]
        , "src"
        , os.path.join( "boost", "mpl", "aux_", "preprocessed" )
        )
| 31.258427 | 110 | 0.575845 |
ef4b37943913d5aa2e80531f319847517cb450ab | 739 | py | Python | th2_common/schema/exception/router_error.py | ConnectDIY/th2-common-py | 977758a68d6a7db91ee38c36667e90bf663f14ef | [
"Apache-2.0"
] | null | null | null | th2_common/schema/exception/router_error.py | ConnectDIY/th2-common-py | 977758a68d6a7db91ee38c36667e90bf663f14ef | [
"Apache-2.0"
] | 18 | 2020-11-23T12:11:31.000Z | 2022-03-29T06:13:19.000Z | th2_common/schema/exception/router_error.py | ConnectDIY/th2-common-py | 977758a68d6a7db91ee38c36667e90bf663f14ef | [
"Apache-2.0"
] | 1 | 2021-01-20T11:21:57.000Z | 2021-01-20T11:21:57.000Z | # Copyright 2020-2020 Exactpro (Exactpro Systems Limited)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class RouterError(RuntimeError):
    """Error raised by th2 schema message routers.

    The original class defined an ``__init__`` that only forwarded ``*args``
    to ``RuntimeError.__init__`` with an identical signature -- behavior that
    Python provides automatically through inheritance -- so that redundant
    constructor has been removed.
    """
| 36.95 | 76 | 0.726658 |
75e3f0678349749443791f268f33a0abb6d21e5a | 9,340 | py | Python | convert.py | stephenxp04/keras-yolo3-object-web-deploy | 7c1e0a7b02c044a724d29778b73c0ad0b7586c67 | [
"MIT"
] | 2 | 2019-04-11T11:46:25.000Z | 2020-12-30T11:11:48.000Z | convert.py | stephenxp04/keras-yolo3-object-web-deploy | 7c1e0a7b02c044a724d29778b73c0ad0b7586c67 | [
"MIT"
] | null | null | null | convert.py | stephenxp04/keras-yolo3-object-web-deploy | 7c1e0a7b02c044a724d29778b73c0ad0b7586c67 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
"""
Reads Darknet config and weights and creates Keras model with TF backend.
"""
import argparse
import configparser
import io
import os
from collections import defaultdict
import numpy as np
from keras import backend as K
from keras.layers import (Conv2D, Input, ZeroPadding2D, Add,
UpSampling2D, Concatenate)
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.normalization import BatchNormalization
from keras.models import Model
from keras.regularizers import l2
from keras.utils.vis_utils import plot_model as plot
# Command-line interface: Darknet cfg + weights in, Keras .h5 model out.
parser = argparse.ArgumentParser(description='Darknet To Keras Converter.')
parser.add_argument('config_path', help='Path to Darknet cfg file.')
parser.add_argument('weights_path', help='Path to Darknet weights file.')
parser.add_argument('output_path', help='Path to output Keras model file.')
parser.add_argument(
    '-p',
    '--plot_model',
    help='Plot generated Keras model and save as image.',
    action='store_true')
def unique_config_sections(config_file):
    """Convert all config sections to have unique names.

    Darknet .cfg files repeat section headers (e.g. many
    ``[convolutional]`` blocks), which ``configparser`` rejects as
    duplicates.  This rewrites each header with a ``_N`` occurrence
    suffix (``[convolutional_0]``, ``[convolutional_1]``, ...).

    Args:
        config_file: path to a Darknet ``.cfg`` file.

    Returns:
        An ``io.StringIO`` containing the rewritten config, rewound to
        the start so it can be passed to ``ConfigParser.read_file``.
    """
    section_counters = defaultdict(int)
    output_stream = io.StringIO()
    with open(config_file) as fin:
        for line in fin:
            if line.startswith('['):
                section = line.strip().strip('[]')
                _section = section + '_' + str(section_counters[section])
                section_counters[section] += 1
                line = line.replace(section, _section)
            # Fix: the original wrote unicode(line); ``unicode`` is a
            # Python 2 builtin and raises NameError on Python 3 (which the
            # rest of this file targets).  StringIO accepts str directly.
            output_stream.write(line)
    output_stream.seek(0)
    return output_stream
# %%
def _main(args):
    """Convert a Darknet YOLOv3 model (cfg + weights) into a Keras .h5 file.

    Walks the cfg sections in order, consumes the matching bytes from the
    .weights file, and builds an equivalent Keras functional model, which
    is then saved to ``args.output_path``.
    """
    config_path = os.path.expanduser(args.config_path)
    weights_path = os.path.expanduser(args.weights_path)
    assert config_path.endswith('.cfg'), '{} is not a .cfg file'.format(
        config_path)
    assert weights_path.endswith(
        '.weights'), '{} is not a .weights file'.format(weights_path)

    output_path = os.path.expanduser(args.output_path)
    assert output_path.endswith(
        '.h5'), 'output path {} is not a .h5 file'.format(output_path)
    output_root = os.path.splitext(output_path)[0]

    # Load weights and config.
    print('Loading weights.')
    weights_file = open(weights_path, 'rb')
    # Darknet weights begin with a header: major/minor/revision int32s,
    # then a "seen images" counter whose width depends on the version.
    major, minor, revision = np.ndarray(
        shape=(3, ), dtype='int32', buffer=weights_file.read(12))
    if (major*10+minor)>=2 and major<1000 and minor<1000:
        # Newer format stores the counter as int64.
        seen = np.ndarray(shape=(1,), dtype='int64', buffer=weights_file.read(8))
    else:
        seen = np.ndarray(shape=(1,), dtype='int32', buffer=weights_file.read(4))
    print('Weights Header: ', major, minor, revision, seen)

    print('Parsing Darknet config.')
    # Rewrite duplicate section names so configparser accepts the file.
    unique_config_file = unique_config_sections(config_path)
    cfg_parser = configparser.ConfigParser()
    cfg_parser.read_file(unique_config_file)

    print('Creating Keras model.')
    input_layer = Input(shape=(None, None, 3))
    prev_layer = input_layer
    all_layers = []

    weight_decay = float(cfg_parser['net_0']['decay']
        ) if 'net_0' in cfg_parser.sections() else 5e-4
    count = 0          # number of weight floats consumed so far
    out_index = []     # indices (into all_layers) of YOLO output layers
    for section in cfg_parser.sections():
        print('Parsing section {}'.format(section))
        if section.startswith('convolutional'):
            filters = int(cfg_parser[section]['filters'])
            size = int(cfg_parser[section]['size'])
            stride = int(cfg_parser[section]['stride'])
            pad = int(cfg_parser[section]['pad'])
            activation = cfg_parser[section]['activation']
            batch_normalize = 'batch_normalize' in cfg_parser[section]

            padding = 'same' if pad == 1 and stride == 1 else 'valid'

            # Setting weights.
            # Darknet serializes convolutional weights as:
            # [bias/beta, [gamma, mean, variance], conv_weights]
            prev_layer_shape = K.int_shape(prev_layer)

            weights_shape = (size, size, prev_layer_shape[-1], filters)
            darknet_w_shape = (filters, weights_shape[2], size, size)
            # NOTE(review): np.product is deprecated (removed in NumPy 2.0);
            # np.prod is the modern spelling.
            weights_size = np.product(weights_shape)

            print('conv2d', 'bn'
                  if batch_normalize else '  ', activation, weights_shape)

            # Per-filter biases always come first in the weight stream.
            conv_bias = np.ndarray(
                shape=(filters, ),
                dtype='float32',
                buffer=weights_file.read(filters * 4))
            count += filters

            if batch_normalize:
                # Three float32 vectors: gamma, running mean, running var.
                bn_weights = np.ndarray(
                    shape=(3, filters),
                    dtype='float32',
                    buffer=weights_file.read(filters * 12))
                count += 3 * filters

                bn_weight_list = [
                    bn_weights[0],  # scale gamma
                    conv_bias,  # shift beta
                    bn_weights[1],  # running mean
                    bn_weights[2]  # running var
                ]

            conv_weights = np.ndarray(
                shape=darknet_w_shape,
                dtype='float32',
                buffer=weights_file.read(weights_size * 4))
            count += weights_size

            # DarkNet conv_weights are serialized Caffe-style:
            # (out_dim, in_dim, height, width)
            # We would like to set these to Tensorflow order:
            # (height, width, in_dim, out_dim)
            conv_weights = np.transpose(conv_weights, [2, 3, 1, 0])
            # With batch norm the bias lives in the BN beta, so the conv
            # layer itself gets weights only.
            conv_weights = [conv_weights] if batch_normalize else [
                conv_weights, conv_bias
            ]

            # Handle activation.
            act_fn = None
            if activation == 'leaky':
                pass  # Add advanced activation later.
            elif activation != 'linear':
                raise ValueError(
                    'Unknown activation function `{}` in section {}'.format(
                        activation, section))

            # Create Conv2D layer
            if stride>1:
                # Darknet uses left and top padding instead of 'same' mode
                prev_layer = ZeroPadding2D(((1,0),(1,0)))(prev_layer)
            conv_layer = (Conv2D(
                filters, (size, size),
                strides=(stride, stride),
                kernel_regularizer=l2(weight_decay),
                use_bias=not batch_normalize,
                weights=conv_weights,
                activation=act_fn,
                padding=padding))(prev_layer)

            if batch_normalize:
                conv_layer = (BatchNormalization(
                    weights=bn_weight_list))(conv_layer)
            prev_layer = conv_layer

            if activation == 'linear':
                all_layers.append(prev_layer)
            elif activation == 'leaky':
                act_layer = LeakyReLU(alpha=0.1)(prev_layer)
                prev_layer = act_layer
                all_layers.append(act_layer)

        elif section.startswith('route'):
            # Route concatenates (or forwards) earlier layers by index.
            ids = [int(i) for i in cfg_parser[section]['layers'].split(',')]
            layers = [all_layers[i] for i in ids]
            if len(layers) > 1:
                print('Concatenating route layers:', layers)
                concatenate_layer = Concatenate()(layers)
                all_layers.append(concatenate_layer)
                prev_layer = concatenate_layer
            else:
                skip_layer = layers[0]  # only one layer to route
                all_layers.append(skip_layer)
                prev_layer = skip_layer

        elif section.startswith('shortcut'):
            # Residual connection: elementwise add with an earlier layer.
            index = int(cfg_parser[section]['from'])
            activation = cfg_parser[section]['activation']
            assert activation == 'linear', 'Only linear activation supported.'
            all_layers.append(Add()([all_layers[index], prev_layer]))
            prev_layer = all_layers[-1]

        elif section.startswith('upsample'):
            stride = int(cfg_parser[section]['stride'])
            assert stride == 2, 'Only stride=2 supported.'
            all_layers.append(UpSampling2D(stride)(prev_layer))
            prev_layer = all_layers[-1]

        elif section.startswith('yolo'):
            # YOLO detection head: mark the previous layer as an output.
            out_index.append(len(all_layers)-1)
            all_layers.append(None)
            prev_layer = all_layers[-1]

        elif section.startswith('net'):
            pass

        else:
            raise ValueError(
                'Unsupported section header type: {}'.format(section))

    # Create and save model.
    model = Model(inputs=input_layer, outputs=[all_layers[i] for i in out_index])
    print(model.summary())
    model.save('{}'.format(output_path))
    print('Saved Keras model to {}'.format(output_path))

    # Check to see if all weights have been read.
    remaining_weights = len(weights_file.read()) / 4
    weights_file.close()
    print('Read {} of {} from Darknet weights.'.format(count, count +
                                                       remaining_weights))
    if remaining_weights > 0:
        print('Warning: {} unused weights'.format(remaining_weights))

    if args.plot_model:
        plot(model, to_file='{}.png'.format(output_root), show_shapes=True)
        print('Saved model plot to {}.png'.format(output_root))
# Script entry point: parse CLI args and run the converter.
if __name__ == '__main__':
    _main(parser.parse_args())
| 38.595041 | 81 | 0.595931 |
065ce952ff66ba988df8395a38d120bb1f84338c | 4,064 | py | Python | tutorial/app.py | p-ivanchik/Werkzeug-docs-cn | 18875d9f025e5ca6fce7e8ec443f9b9e28f474bc | [
"BSD-2-Clause"
] | 24 | 2015-05-27T02:57:05.000Z | 2020-03-19T03:33:11.000Z | tutorial/app.py | p-ivanchik/Werkzeug-docs-cn | 18875d9f025e5ca6fce7e8ec443f9b9e28f474bc | [
"BSD-2-Clause"
] | null | null | null | tutorial/app.py | p-ivanchik/Werkzeug-docs-cn | 18875d9f025e5ca6fce7e8ec443f9b9e28f474bc | [
"BSD-2-Clause"
] | 14 | 2015-10-14T18:19:41.000Z | 2021-08-19T16:16:32.000Z | import os
import redis
import urlparse
from werkzeug.wrappers import Request, Response
from werkzeug.routing import Map, Rule
from werkzeug.exceptions import HTTPException, NotFound
from werkzeug.wsgi import SharedDataMiddleware
from werkzeug.utils import redirect
from jinja2 import Environment, FileSystemLoader
def is_valid_url(url):
    """Return True if *url* parses with an http or https scheme."""
    scheme = urlparse.urlparse(url).scheme
    return scheme == 'http' or scheme == 'https'
def base36_encode(number):
    """Encode a non-negative integer as a lowercase base-36 string."""
    assert number >= 0, 'positive integer required'
    if number == 0:
        return '0'
    alphabet = '0123456789abcdefghijklmnopqrstuvwxyz'
    encoded = ''
    while number:
        # Peel off the least-significant digit and prepend it.
        number, remainder = divmod(number, 36)
        encoded = alphabet[remainder] + encoded
    return encoded
class Shortly(object):
    """WSGI application implementing a small URL shortener.

    Short-id -> URL mappings and per-link click counts are stored in
    Redis; pages are rendered with Jinja2 templates.  NOTE: this file
    uses Python 2 syntax (``except HTTPException, e``).
    """

    def __init__(self, config):
        # config supplies 'redis_host' and 'redis_port'.
        self.redis = redis.Redis(config['redis_host'], config['redis_port'])
        template_path = os.path.join(os.path.dirname(__file__), 'templates')
        self.jinja_env = Environment(loader=FileSystemLoader(template_path),
                                     autoescape=True)
        # URL routing table; endpoint names map to on_<endpoint> methods.
        self.url_map = Map([
            Rule('/', endpoint='new_url'),
            Rule('/<short_id>', endpoint='follow_short_link'),
            Rule('/<short_id>+', endpoint='short_link_details')
        ])

    def dispatch_request(self, request):
        """Route *request* to the matching on_<endpoint> handler."""
        adapter = self.url_map.bind_to_environ(request.environ)
        try:
            endpoint, values = adapter.match()
            return getattr(self, 'on_' + endpoint)(request, **values)
        except HTTPException, e:
            # HTTP errors (404 etc.) are themselves valid WSGI responses.
            return e

    def wsgi_app(self, environ, start_response):
        """The actual WSGI callable (kept separate so middleware can wrap it)."""
        request = Request(environ)
        response = self.dispatch_request(request)
        return response(environ, start_response)

    def __call__(self, environ, start_response):
        return self.wsgi_app(environ, start_response)

    def render_template(self, template_name, **context):
        """Render a Jinja2 template into an HTML Response."""
        t = self.jinja_env.get_template(template_name)
        return Response(t.render(**context), mimetype='text/html')

    def on_new_url(self, request):
        """Landing page: on POST, validate and shorten the submitted URL."""
        error = None
        url = ''
        if request.method == 'POST':
            url = request.form['url']
            if not is_valid_url(url):
                error = 'Please enter a valid URL'
            else:
                short_id = self.insert_url(url)
                return redirect('/%s+' % short_id)
        return self.render_template('new_url.html', error=error, url=url)

    def on_follow_short_link(self, request, short_id):
        """Redirect to the target URL and bump its click counter."""
        link_target = self.redis.get('url-target:' + short_id)
        if link_target is None:
            raise NotFound()
        self.redis.incr('click-count:' + short_id)
        return redirect(link_target)

    def on_short_link_details(self, request, short_id):
        """Show the target URL and click count for a short link."""
        link_target = self.redis.get('url-target:' + short_id)
        if link_target is None:
            raise NotFound()
        click_count = int(self.redis.get('click-count:' + short_id) or 0)
        return self.render_template('short_link_details.html',
            link_target=link_target,
            short_id=short_id,
            click_count=click_count
        )

    def insert_url(self, url):
        """Store *url*, returning its (possibly pre-existing) short id."""
        # Reverse lookup first so repeated URLs reuse the same id.
        short_id = self.redis.get('reverse-url:' + url)
        if short_id is not None:
            return short_id
        url_num = self.redis.incr('last_url_id')
        short_id = base36_encode(url_num)
        self.redis.set('url-target:' + short_id, url)
        self.redis.set('reverse-url:' + url, short_id)
        return short_id
def create_app(redis_host='localhost', redis_port=6379, with_static=True):
    """Build and return a configured Shortly WSGI application.

    When *with_static* is true, the app is wrapped so that /static/*
    requests are served from the package's ``static`` directory.
    """
    config = {
        'redis_host': redis_host,
        'redis_port': redis_port
    }
    application = Shortly(config)
    if not with_static:
        return application
    static_folder = os.path.join(os.path.dirname(__file__), 'static')
    application.wsgi_app = SharedDataMiddleware(application.wsgi_app, {
        '/static': static_folder
    })
    return application
if __name__ == '__main__':
    # Development entry point: local server with debugger and auto-reload.
    from werkzeug.serving import run_simple
    app = create_app()
    run_simple('127.0.0.1', 5000, app, use_debugger=True, use_reloader=True)
| 35.034483 | 76 | 0.633612 |
478aac80e6408702468aa10ab4e4b819e0f8777c | 313 | py | Python | app/user/urls.py | sorwarduet/recipe-app-api | ab13680369a693e5bf9d864328384f5781722cc2 | [
"MIT"
] | null | null | null | app/user/urls.py | sorwarduet/recipe-app-api | ab13680369a693e5bf9d864328384f5781722cc2 | [
"MIT"
] | null | null | null | app/user/urls.py | sorwarduet/recipe-app-api | ab13680369a693e5bf9d864328384f5781722cc2 | [
"MIT"
] | null | null | null |
from django.urls import path
from .views import CreateUserView, CreateUserToken, ManageUserView
# URL namespace for reversing, e.g. reverse('user:create').
app_name = 'user'

urlpatterns = [
    # Account registration endpoint (CreateUserView).
    path('create/', CreateUserView.as_view(), name='create'),
    # Auth-token endpoint (CreateUserToken).
    path('token/', CreateUserToken.as_view(), name='token'),
    # Authenticated user's own profile endpoint (ManageUserView).
    path('me/', ManageUserView.as_view(), name='me'),
]
| 24.076923 | 66 | 0.699681 |
ae661d7c7f6a7e2867e0f43b1d1a611775bc6cd6 | 24,638 | py | Python | ambari-agent/src/test/python/ambari_agent/TestRecoveryManager.py | emcvipr/ambari | 045e0d2ac94b8b81819d2efbfd7e1bddc67a7756 | [
"Apache-2.0",
"MIT"
] | 1 | 2021-05-06T06:24:04.000Z | 2021-05-06T06:24:04.000Z | ambari-agent/src/test/python/ambari_agent/TestRecoveryManager.py | emcvipr/ambari | 045e0d2ac94b8b81819d2efbfd7e1bddc67a7756 | [
"Apache-2.0",
"MIT"
] | null | null | null | ambari-agent/src/test/python/ambari_agent/TestRecoveryManager.py | emcvipr/ambari | 045e0d2ac94b8b81819d2efbfd7e1bddc67a7756 | [
"Apache-2.0",
"MIT"
] | 3 | 2017-10-31T11:42:31.000Z | 2021-04-26T07:17:53.000Z | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from unittest import TestCase
import copy
import tempfile
from ambari_agent.RecoveryManager import RecoveryManager
from mock.mock import patch, MagicMock, call
class _TestRecoveryManager(TestCase):
command = {
"commandType": "STATUS_COMMAND",
"payloadLevel": "EXECUTION_COMMAND",
"componentName": "NODEMANAGER",
"desiredState": "STARTED",
"hasStaleConfigs": False,
"executionCommandDetails": {
"commandType": "EXECUTION_COMMAND",
"roleCommand": "INSTALL",
"role": "NODEMANAGER",
"hostLevelParams": {
"custom_command":""},
"configurations": {
"capacity-scheduler": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"capacity-calculator": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"commandParams": {
"service_package_folder": "common-services/YARN/2.1.0.2.0/package"
}
}
}
}
exec_command1 = {
"commandType": "EXECUTION_COMMAND",
"roleCommand": "INSTALL",
"role": "NODEMANAGER",
"configurations": {
"capacity-scheduler": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"capacity-calculator": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"commandParams": {
"service_package_folder": "common-services/YARN/2.1.0.2.0/package"
}
},
"hostLevelParams": {}
}
exec_command2 = {
"commandType": "EXECUTION_COMMAND",
"roleCommand": "START",
"role": "NODEMANAGER",
"configurations": {
"capacity-scheduler": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"capacity-calculator": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"commandParams": {
"service_package_folder": "common-services/YARN/2.1.0.2.0/package"
}
},
"hostLevelParams": {}
}
exec_command3 = {
"commandType": "EXECUTION_COMMAND",
"roleCommand": "SERVICE_CHECK",
"role": "NODEMANAGER",
"configurations": {
"capacity-scheduler": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"capacity-calculator": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"commandParams": {
"service_package_folder": "common-services/YARN/2.1.0.2.0/package"
}
},
"hostLevelParams": {}
}
exec_command4 = {
"commandType": "EXECUTION_COMMAND",
"roleCommand": "CUSTOM_COMMAND",
"role": "NODEMANAGER",
"configurations": {
"capacity-scheduler": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"capacity-calculator": {
"yarn.scheduler.capacity.default.minimum-user-limit-percent": "100"},
"commandParams": {
"service_package_folder": "common-services/YARN/2.1.0.2.0/package"
}
},
"hostLevelParams": {
"custom_command": "RESTART"
}
}
def setUp(self):
pass
def tearDown(self):
pass
@patch.object(RecoveryManager, "update_desired_status")
def test_process_commands(self, mock_uds):
rm = RecoveryManager(tempfile.mktemp(), True)
rm.process_status_commands(None)
self.assertFalse(mock_uds.called)
rm.process_status_commands([])
self.assertFalse(mock_uds.called)
rm.process_status_commands([self.command])
mock_uds.assert_has_calls([call("NODEMANAGER", "STARTED")])
mock_uds.reset_mock()
rm.process_status_commands([self.command, self.exec_command1, self.command])
mock_uds.assert_has_calls([call("NODEMANAGER", "STARTED")], [call("NODEMANAGER", "STARTED")])
mock_uds.reset_mock()
rm.update_config(12, 5, 1, 15, True, False, "NODEMANAGER", -1)
rm.process_execution_commands([self.exec_command1, self.exec_command2, self.exec_command3])
mock_uds.assert_has_calls([call("NODEMANAGER", "INSTALLED")], [call("NODEMANAGER", "STARTED")])
mock_uds.reset_mock()
rm.process_execution_commands([self.exec_command1, self.command])
mock_uds.assert_has_calls([call("NODEMANAGER", "INSTALLED")])
rm.process_execution_commands([self.exec_command4])
mock_uds.assert_has_calls([call("NODEMANAGER", "STARTED")])
pass
def test_defaults(self):
rm = RecoveryManager(tempfile.mktemp())
self.assertFalse(rm.enabled())
self.assertEqual(None, rm.get_install_command("NODEMANAGER"))
self.assertEqual(None, rm.get_start_command("NODEMANAGER"))
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
pass
@patch.object(RecoveryManager, "_now_")
def test_sliding_window(self, time_mock):
time_mock.side_effect = \
[1000, 1001, 1002, 1003, 1004, 1071, 1150, 1151, 1152, 1153, 1400, 1401,
1500, 1571, 1572, 1653, 1900, 1971, 2300, 2301]
rm = RecoveryManager(tempfile.mktemp(), True, False)
self.assertTrue(rm.enabled())
config = rm.update_config(0, 60, 5, 12, True, False, "", -1)
self.assertFalse(rm.enabled())
rm.update_config(6, 60, 5, 12, True, False, "", -1)
self.assertTrue(rm.enabled())
rm.update_config(6, 0, 5, 12, True, False, "", -1)
self.assertFalse(rm.enabled())
rm.update_config(6, 60, 0, 12, True, False, "", -1)
self.assertFalse(rm.enabled())
rm.update_config(6, 60, 1, 12, True, False, None, -1)
self.assertTrue(rm.enabled())
rm.update_config(6, 60, 61, 12, True, False, None, -1)
self.assertFalse(rm.enabled())
rm.update_config(6, 60, 5, 4, True, False, "", -1)
self.assertFalse(rm.enabled())
# maximum 2 in 2 minutes and at least 1 minute wait
rm.update_config(2, 5, 1, 4, True, False, "", -1)
self.assertTrue(rm.enabled())
# T = 1000-2
self.assertTrue(rm.may_execute("NODEMANAGER"))
self.assertTrue(rm.may_execute("NODEMANAGER"))
self.assertTrue(rm.may_execute("NODEMANAGER"))
# T = 1003-4
self.assertTrue(rm.execute("NODEMANAGER"))
self.assertFalse(rm.execute("NODEMANAGER")) # too soon
# T = 1071
self.assertTrue(rm.execute("NODEMANAGER")) # 60+ seconds passed
# T = 1150-3
self.assertFalse(rm.execute("NODEMANAGER")) # limit 2 exceeded
self.assertFalse(rm.may_execute("NODEMANAGER"))
self.assertTrue(rm.execute("DATANODE"))
self.assertTrue(rm.may_execute("NAMENODE"))
# T = 1400-1
self.assertTrue(rm.execute("NODEMANAGER")) # windows reset
self.assertFalse(rm.may_execute("NODEMANAGER")) # too soon
# maximum 2 in 2 minutes and no min wait
rm.update_config(2, 5, 1, 5, True, True, "", -1)
# T = 1500-3
self.assertTrue(rm.execute("NODEMANAGER2"))
self.assertTrue(rm.may_execute("NODEMANAGER2"))
self.assertTrue(rm.execute("NODEMANAGER2"))
self.assertFalse(rm.execute("NODEMANAGER2")) # max limit
# T = 1900-2
self.assertTrue(rm.execute("NODEMANAGER2"))
self.assertTrue(rm.execute("NODEMANAGER2"))
# T = 2300-2
# lifetime max reached
self.assertTrue(rm.execute("NODEMANAGER2"))
self.assertFalse(rm.execute("NODEMANAGER2"))
pass
def test_recovery_required(self):
rm = RecoveryManager(tempfile.mktemp(), True, False)
rm.update_config(12, 5, 1, 15, True, False, "NODEMANAGER", -1)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertTrue(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("NODEMANAGER", "STARTED")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
self.assertTrue(rm.requires_recovery("NODEMANAGER"))
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "XYS")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
rm.update_desired_status("NODEMANAGER", "")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("NODEMANAGER", "INIT")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
self.assertTrue(rm.requires_recovery("NODEMANAGER"))
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertTrue(rm.requires_recovery("NODEMANAGER"))
rm = RecoveryManager(tempfile.mktemp(), True, True)
rm.update_current_status("NODEMANAGER", "INIT")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("NODEMANAGER", "INIT")
rm.update_desired_status("NODEMANAGER", "START")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "START")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
pass
def test_recovery_required2(self):
rm = RecoveryManager(tempfile.mktemp(), True, True)
rm.update_config(15, 5, 1, 16, True, False, "NODEMANAGER", -1)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertTrue(rm.requires_recovery("NODEMANAGER"))
rm = RecoveryManager(tempfile.mktemp(), True, True)
rm.update_config(15, 5, 1, 16, True, False, "NODEMANAGER", -1)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertTrue(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("DATANODE", "INSTALLED")
rm.update_desired_status("DATANODE", "STARTED")
self.assertFalse(rm.requires_recovery("DATANODE"))
rm = RecoveryManager(tempfile.mktemp(), True, True)
rm.update_config(15, 5, 1, 16, True, False, "", -1)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertFalse(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("DATANODE", "INSTALLED")
rm.update_desired_status("DATANODE", "STARTED")
self.assertFalse(rm.requires_recovery("DATANODE"))
rm.update_config(15, 5, 1, 16, True, False, "NODEMANAGER", -1)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertTrue(rm.requires_recovery("NODEMANAGER"))
rm.update_current_status("DATANODE", "INSTALLED")
rm.update_desired_status("DATANODE", "STARTED")
self.assertFalse(rm.requires_recovery("DATANODE"))
pass
@patch('time.time', MagicMock(side_effects=[1]))
def test_store_from_status_and_use(self):
rm = RecoveryManager(tempfile.mktemp(), True)
command1 = copy.deepcopy(self.command)
rm.store_or_update_command(command1)
self.assertTrue(rm.command_exists("NODEMANAGER", "EXECUTION_COMMAND"))
install_command = rm.get_install_command("NODEMANAGER")
start_command = rm.get_start_command("NODEMANAGER")
self.assertEqual("INSTALL", install_command["roleCommand"])
self.assertEqual("START", start_command["roleCommand"])
self.assertEqual("AUTO_EXECUTION_COMMAND", install_command["commandType"])
self.assertEqual("AUTO_EXECUTION_COMMAND", start_command["commandType"])
self.assertEqual("NODEMANAGER", install_command["role"])
self.assertEqual("NODEMANAGER", start_command["role"])
self.assertEquals(install_command["configurations"], start_command["configurations"])
self.assertEqual(2, install_command["taskId"])
self.assertEqual(3, start_command["taskId"])
self.assertEqual(None, rm.get_install_command("component2"))
self.assertEqual(None, rm.get_start_command("component2"))
self.assertTrue(rm.remove_command("NODEMANAGER"))
self.assertFalse(rm.remove_command("NODEMANAGER"))
self.assertEqual(None, rm.get_install_command("NODEMANAGER"))
self.assertEqual(None, rm.get_start_command("NODEMANAGER"))
self.assertEqual(None, rm.get_install_command("component2"))
self.assertEqual(None, rm.get_start_command("component2"))
rm.store_or_update_command(command1)
self.assertTrue(rm.command_exists("NODEMANAGER", "EXECUTION_COMMAND"))
rm.set_paused(True)
self.assertEqual(None, rm.get_install_command("NODEMANAGER"))
self.assertEqual(None, rm.get_start_command("NODEMANAGER"))
pass
@patch.object(RecoveryManager, "_now_")
def test_get_recovery_commands(self, time_mock):
time_mock.side_effect = \
[1000, 1001, 1002, 1003,
1100, 1101, 1102,
1200, 1201, 1203,
4000, 4001, 4002, 4003,
4100, 4101, 4102, 4103,
4200, 4201, 4202,
4300, 4301, 4302]
rm = RecoveryManager(tempfile.mktemp(), True)
rm.update_config(15, 5, 1, 16, True, False, "", -1)
command1 = copy.deepcopy(self.command)
rm.store_or_update_command(command1)
rm.update_config(12, 5, 1, 15, True, False, "NODEMANAGER", -1)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "STARTED")
self.assertEqual("INSTALLED", rm.get_current_status("NODEMANAGER"))
self.assertEqual("STARTED", rm.get_desired_status("NODEMANAGER"))
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("START", commands[0]["roleCommand"])
rm.update_current_status("NODEMANAGER", "INIT")
rm.update_desired_status("NODEMANAGER", "STARTED")
# Starts at 1100
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("INSTALL", commands[0]["roleCommand"])
rm.update_current_status("NODEMANAGER", "INIT")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
# Starts at 1200
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("INSTALL", commands[0]["roleCommand"])
rm.update_config(2, 5, 1, 5, True, True, "", -1)
rm.update_current_status("NODEMANAGER", "INIT")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
commands = rm.get_recovery_commands()
self.assertEqual(0, len(commands))
rm.update_config(12, 5, 1, 15, True, False, "NODEMANAGER", -1)
rm.update_current_status("NODEMANAGER", "INIT")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
rm.store_or_update_command(command1)
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("INSTALL", commands[0]["roleCommand"])
rm.update_config_staleness("NODEMANAGER", False)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
commands = rm.get_recovery_commands()
self.assertEqual(0, len(commands))
command_install = copy.deepcopy(self.command)
command_install["desiredState"] = "INSTALLED"
rm.store_or_update_command(command_install)
rm.update_config_staleness("NODEMANAGER", True)
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("INSTALL", commands[0]["roleCommand"])
rm.update_current_status("NODEMANAGER", "STARTED")
rm.update_desired_status("NODEMANAGER", "STARTED")
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("CUSTOM_COMMAND", commands[0]["roleCommand"])
self.assertEqual("RESTART", commands[0]["hostLevelParams"]["custom_command"])
rm.update_current_status("NODEMANAGER", "STARTED")
rm.update_desired_status("NODEMANAGER", "INSTALLED")
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("STOP", commands[0]["roleCommand"])
pass
@patch.object(RecoveryManager, "update_config")
def test_update_rm_config(self, mock_uc):
rm = RecoveryManager(tempfile.mktemp())
rm.update_configuration_from_registration(None)
mock_uc.assert_has_calls([call(6, 60, 5, 12, False, True, "", -1)])
mock_uc.reset_mock()
rm.update_configuration_from_registration({})
mock_uc.assert_has_calls([call(6, 60, 5, 12, False, True, "", -1)])
mock_uc.reset_mock()
rm.update_configuration_from_registration(
{"recoveryConfig": {
"type" : "DEFAULT"}}
)
mock_uc.assert_has_calls([call(6, 60, 5, 12, False, True, "", -1)])
mock_uc.reset_mock()
rm.update_configuration_from_registration(
{"recoveryConfig": {
"type" : "FULL"}}
)
mock_uc.assert_has_calls([call(6, 60, 5, 12, True, False, "", -1)])
mock_uc.reset_mock()
rm.update_configuration_from_registration(
{"recoveryConfig": {
"type" : "AUTO_START",
"max_count" : "med"}}
)
mock_uc.assert_has_calls([call(6, 60, 5, 12, True, True, "", -1)])
mock_uc.reset_mock()
rm.update_configuration_from_registration(
{"recoveryConfig": {
"type" : "AUTO_START",
"maxCount" : "5",
"windowInMinutes" : 20,
"retryGap" : 2,
"maxLifetimeCount" : 5,
"components" : " A,B",
"recoveryTimestamp" : 1}}
)
mock_uc.assert_has_calls([call(5, 20, 2, 5, True, True, " A,B", 1)])
pass
@patch.object(RecoveryManager, "_now_")
def test_recovery_report(self, time_mock):
time_mock.side_effect = \
[1000, 1071, 1072, 1470, 1471, 1472, 1543, 1644, 1815]
rm = RecoveryManager(tempfile.mktemp())
rec_st = rm.get_recovery_status()
self.assertEquals(rec_st, {"summary": "DISABLED"})
rm.update_config(2, 5, 1, 4, True, True, "", -1)
rec_st = rm.get_recovery_status()
self.assertEquals(rec_st, {"summary": "RECOVERABLE", "componentReports": []})
rm.execute("PUMA")
rec_st = rm.get_recovery_status()
self.assertEquals(rec_st, {"summary": "RECOVERABLE",
"componentReports": [{"name": "PUMA", "numAttempts": 1, "limitReached": False}]})
rm.execute("PUMA")
rm.execute("LION")
rec_st = rm.get_recovery_status()
self.assertEquals(rec_st, {"summary": "RECOVERABLE",
"componentReports": [
{"name": "LION", "numAttempts": 1, "limitReached": False},
{"name": "PUMA", "numAttempts": 2, "limitReached": False}
]})
rm.execute("PUMA")
rm.execute("LION")
rm.execute("PUMA")
rm.execute("PUMA")
rm.execute("LION")
rec_st = rm.get_recovery_status()
self.assertEquals(rec_st, {"summary": "PARTIALLY_RECOVERABLE",
"componentReports": [
{"name": "LION", "numAttempts": 3, "limitReached": False},
{"name": "PUMA", "numAttempts": 4, "limitReached": True}
]})
rm.execute("LION")
rec_st = rm.get_recovery_status()
self.assertEquals(rec_st, {"summary": "UNRECOVERABLE",
"componentReports": [
{"name": "LION", "numAttempts": 4, "limitReached": True},
{"name": "PUMA", "numAttempts": 4, "limitReached": True}
]})
pass
@patch.object(RecoveryManager, "_now_")
def test_command_expiry(self, time_mock):
time_mock.side_effect = \
[1000, 1001, 1002, 1003, 1104, 1105, 1106, 1807, 1808, 1809, 1810, 1811, 1812]
rm = RecoveryManager(tempfile.mktemp(), True)
rm.update_config(5, 5, 1, 11, True, False, "", -1)
command1 = copy.deepcopy(self.command)
rm.store_or_update_command(command1)
rm.update_config(12, 5, 1, 15, True, False, "NODEMANAGER", -1)
rm.update_current_status("NODEMANAGER", "INSTALLED")
rm.update_desired_status("NODEMANAGER", "STARTED")
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("START", commands[0]["roleCommand"])
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("START", commands[0]["roleCommand"])
#1807 command is stale
commands = rm.get_recovery_commands()
self.assertEqual(0, len(commands))
rm.store_or_update_command(command1)
commands = rm.get_recovery_commands()
self.assertEqual(1, len(commands))
self.assertEqual("START", commands[0]["roleCommand"])
pass
def test_command_count(self):
rm = RecoveryManager(tempfile.mktemp(), True)
self.assertFalse(rm.has_active_command())
rm.start_execution_command()
self.assertTrue(rm.has_active_command())
rm.start_execution_command()
self.assertTrue(rm.has_active_command())
rm.stop_execution_command()
self.assertTrue(rm.has_active_command())
rm.stop_execution_command()
self.assertFalse(rm.has_active_command())
def test_configured_for_recovery(self):
rm = RecoveryManager(tempfile.mktemp(), True)
rm.update_config(12, 5, 1, 15, True, False, "A,B", -1)
self.assertTrue(rm.configured_for_recovery("A"))
self.assertTrue(rm.configured_for_recovery("B"))
rm.update_config(5, 5, 1, 11, True, False, "", -1)
self.assertFalse(rm.configured_for_recovery("A"))
self.assertFalse(rm.configured_for_recovery("B"))
rm.update_config(5, 5, 1, 11, True, False, "A", -1)
self.assertTrue(rm.configured_for_recovery("A"))
self.assertFalse(rm.configured_for_recovery("B"))
rm.update_config(5, 5, 1, 11, True, False, "A", -1)
self.assertTrue(rm.configured_for_recovery("A"))
self.assertFalse(rm.configured_for_recovery("B"))
self.assertFalse(rm.configured_for_recovery("C"))
rm.update_config(5, 5, 1, 11, True, False, "A, D, F ", -1)
self.assertTrue(rm.configured_for_recovery("A"))
self.assertFalse(rm.configured_for_recovery("B"))
self.assertFalse(rm.configured_for_recovery("C"))
self.assertTrue(rm.configured_for_recovery("D"))
self.assertFalse(rm.configured_for_recovery("E"))
self.assertTrue(rm.configured_for_recovery("F"))
@patch.object(RecoveryManager, "_now_")
def test_reset_if_window_passed_since_last_attempt(self, time_mock):
    """lastReset stays put while execute() calls land close together and
    jumps to the current time once a large enough gap occurs (observed
    below: unchanged at t=1071, reset at t=1372)."""
    # One mocked clock reading is consumed per execute() call, in order.
    time_mock.side_effect = \
        [1000, 1071, 1372]
    rm = RecoveryManager(tempfile.mktemp(), True)
    rm.update_config(2, 5, 1, 4, True, True, "", -1)
    rm.execute("COMPONENT")
    actions = rm.get_actions_copy()["COMPONENT"]
    self.assertEquals(actions['lastReset'], 1000)
    # 71 seconds later: still inside the window, so no reset.
    rm.execute("COMPONENT")
    actions = rm.get_actions_copy()["COMPONENT"]
    self.assertEquals(actions['lastReset'], 1000)
    #reset if window_in_sec seconds passed since last attempt
    rm.execute("COMPONENT")
    actions = rm.get_actions_copy()["COMPONENT"]
    self.assertEquals(actions['lastReset'], 1372)
@patch.object(RecoveryManager, "_now_")
def test_is_action_info_stale(self, time_mock):
    """Recorded action info becomes stale once enough time passes after
    lastAttempt (boundary observed below: for lastAttempt=1 the entry is
    fresh at t=3601 and stale at t=3602)."""
    rm = RecoveryManager(tempfile.mktemp(), True)
    rm.update_config(5, 60, 5, 16, True, False, "", -1)

    time_mock.return_value = 0
    # Component with no recorded actions is never stale.
    self.assertFalse(rm.is_action_info_stale("COMPONENT_NAME"))

    rm.actions["COMPONENT_NAME"] = {
        "lastAttempt": 0,
        "count": 0,
        "lastReset": 0,
        "lifetimeCount": 0,
        "warnedLastAttempt": False,
        "warnedLastReset": False,
        "warnedThresholdReached": False
    }
    time_mock.return_value = 3600
    # Entry with no attempts recorded: not stale even an hour later
    # (presumably because count == 0 — TODO confirm against
    # RecoveryManager.is_action_info_stale).
    self.assertFalse(rm.is_action_info_stale("COMPONENT_NAME"))

    rm.actions["COMPONENT_NAME"] = {
        "lastAttempt": 1,
        "count": 1,
        "lastReset": 0,
        "lifetimeCount": 1,
        "warnedLastAttempt": False,
        "warnedLastReset": False,
        "warnedThresholdReached": False
    }
    time_mock.return_value = 3601
    # 3600s after lastAttempt: still fresh.
    self.assertFalse(rm.is_action_info_stale("COMPONENT_NAME"))
    time_mock.return_value = 3602
    # 3601s after lastAttempt: stale.
    self.assertTrue(rm.is_action_info_stale("COMPONENT_NAME"))
| 36.339233 | 112 | 0.679357 |
7673fa56162b73fe08884294d454dc83f0b5560c | 119 | py | Python | 02_excel_is_dead_long_live_pandas/src/_solutions/tab_data02.py | hello-world-academy/beiersdorf_05-06-2019 | 79b244ae21fa2815bf429653ccf6b51ddc468901 | [
"MIT"
] | null | null | null | 02_excel_is_dead_long_live_pandas/src/_solutions/tab_data02.py | hello-world-academy/beiersdorf_05-06-2019 | 79b244ae21fa2815bf429653ccf6b51ddc468901 | [
"MIT"
] | null | null | null | 02_excel_is_dead_long_live_pandas/src/_solutions/tab_data02.py | hello-world-academy/beiersdorf_05-06-2019 | 79b244ae21fa2815bf429653ccf6b51ddc468901 | [
"MIT"
] | null | null | null | pop.columns
pop.location_name.nunique()
pop.location_name.sample(10)
pop.sex_name.unique()
pop.age_group_name.unique()
| 19.833333 | 28 | 0.815126 |
944c45da790d745ea220ef7c7c6763d545423feb | 4,329 | py | Python | huaweicloud-sdk-dds/huaweicloudsdkdds/v3/model/download_slowlog_response.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | 1 | 2021-11-03T07:54:50.000Z | 2021-11-03T07:54:50.000Z | huaweicloud-sdk-dds/huaweicloudsdkdds/v3/model/download_slowlog_response.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | null | null | null | huaweicloud-sdk-dds/huaweicloudsdkdds/v3/model/download_slowlog_response.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class DownloadSlowlogResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'list': 'list[DownloadSlowlogResult]',
'status': 'str',
'count': 'int'
}
attribute_map = {
'list': 'list',
'status': 'status',
'count': 'count'
}
def __init__(self, list=None, status=None, count=None):
"""DownloadSlowlogResponse - a model defined in huaweicloud sdk"""
super(DownloadSlowlogResponse, self).__init__()
self._list = None
self._status = None
self._count = None
self.discriminator = None
if list is not None:
self.list = list
if status is not None:
self.status = status
if count is not None:
self.count = count
@property
def list(self):
"""Gets the list of this DownloadSlowlogResponse.
具体信息。
:return: The list of this DownloadSlowlogResponse.
:rtype: list[DownloadSlowlogResult]
"""
return self._list
@list.setter
def list(self, list):
"""Sets the list of this DownloadSlowlogResponse.
具体信息。
:param list: The list of this DownloadSlowlogResponse.
:type: list[DownloadSlowlogResult]
"""
self._list = list
@property
def status(self):
"""Gets the status of this DownloadSlowlogResponse.
慢日志下载链接生成状态。 - FINISH,表示下载链接已经生成完成。 - CREATING,表示正在生成文件,准备下载链接。 - FAILED,表示存在日志文件准备失败。
:return: The status of this DownloadSlowlogResponse.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this DownloadSlowlogResponse.
慢日志下载链接生成状态。 - FINISH,表示下载链接已经生成完成。 - CREATING,表示正在生成文件,准备下载链接。 - FAILED,表示存在日志文件准备失败。
:param status: The status of this DownloadSlowlogResponse.
:type: str
"""
self._status = status
@property
def count(self):
"""Gets the count of this DownloadSlowlogResponse.
慢日志链接数量。
:return: The count of this DownloadSlowlogResponse.
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this DownloadSlowlogResponse.
慢日志链接数量。
:param count: The count of this DownloadSlowlogResponse.
:type: int
"""
self._count = count
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DownloadSlowlogResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 26.236364 | 94 | 0.560869 |
eb5493ac3694986b0673db4dc9ec0439e8080e77 | 5,118 | py | Python | torch_pitch_shift/main.py | ilaria-manco/torch-pitch-shift | 1e534ec8a385cbbd5f1f0b203c4835c08ea6eae6 | [
"MIT"
] | null | null | null | torch_pitch_shift/main.py | ilaria-manco/torch-pitch-shift | 1e534ec8a385cbbd5f1f0b203c4835c08ea6eae6 | [
"MIT"
] | null | null | null | torch_pitch_shift/main.py | ilaria-manco/torch-pitch-shift | 1e534ec8a385cbbd5f1f0b203c4835c08ea6eae6 | [
"MIT"
] | null | null | null | from collections import Counter
from fractions import Fraction
from functools import reduce
from itertools import chain, count, islice, repeat
from typing import Union, Callable, List, Optional
from torch.nn.functional import pad
import torch
import torchaudio.transforms as T
from primePy import primes
from math import log2
import warnings
warnings.simplefilter("ignore")
# https://stackoverflow.com/a/46623112/9325832
def _combinations_without_repetition(r, iterable=None, values=None, counts=None):
if iterable:
values, counts = zip(*Counter(iterable).items())
f = lambda i, c: chain.from_iterable(map(repeat, i, c))
n = len(counts)
indices = list(islice(f(count(), counts), r))
if len(indices) < r:
return
while True:
yield tuple(values[i] for i in indices)
for i, j in zip(reversed(range(r)), f(reversed(range(n)), reversed(counts))):
if indices[i] != j:
break
else:
return
j = indices[i] + 1
for i, j in zip(range(i, r), f(count(j), counts[j:])):
indices[i] = j
def get_fast_shifts(
    sample_rate: int,
    condition: Optional[Callable] = lambda x: x >= 0.5 and x <= 2 and x != 1,
) -> List[Fraction]:
    """
    Find pitch-shift ratios that can be computed quickly for a sample rate.

    Candidate ratios are quotients of products of the sample rate's prime
    factors, so resampling works on small integer rates.

    Parameters
    ----------
    sample_rate: int
        The sample rate of an audio clip.
    condition: Callable [optional]
        Predicate that accepts/rejects candidate ratios.
        Default keeps ratios within +/- one octave, excluding 1.

    Returns
    -------
    output: List[Fraction]
        The accepted pitch-shift target ratios.
    """
    prime_factors = primes.factors(sample_rate)

    # Every product of a non-empty sub-multiset of the prime factors.
    divisor_products = []
    for size in range(1, len(prime_factors) + 1):
        for combo in _combinations_without_repetition(size, iterable=prime_factors):
            divisor_products.append(reduce(lambda acc, f: acc * f, combo))

    accepted = set()
    for numerator in divisor_products:
        for denominator in divisor_products:
            ratio = Fraction(numerator, denominator)
            if condition(ratio):
                accepted.add(ratio)
    return list(accepted)
def semitones_to_ratio(semitones: float) -> Fraction:
    """
    Convert a pitch shift in semitones to a frequency ratio.

    Parameters
    ----------
    semitones: float
        The number of semitones for a desired shift.

    Returns
    -------
    output: Fraction
        The equivalent frequency ratio, 2 ** (semitones / 12).
    """
    octaves = semitones / 12.0
    return Fraction(2.0 ** octaves)
def ratio_to_semitones(ratio: Fraction) -> float:
    """
    Convert a frequency ratio to a pitch shift in semitones.

    Parameters
    ----------
    ratio: Fraction
        The frequency ratio of a desired shift.

    Returns
    -------
    output: float
        The shift magnitude in semitones, 12 * log2(ratio).
    """
    semitones = 12.0 * log2(ratio)
    return float(semitones)
def pitch_shift(
    input: torch.Tensor,
    shift: Union[float, Fraction],
    sample_rate: int,
    bins_per_octave: Optional[int] = 12,
    n_fft: Optional[int] = 0,
    hop_length: Optional[int] = 0,
) -> torch.Tensor:
    """
    Shift the pitch of a batch of waveforms by a given amount.

    Implementation: time-stretch the STFT by 1/shift (phase vocoder),
    then resample from sample_rate to sample_rate/shift so the clip
    regains its original duration — the pitch moves by `shift` instead.

    Parameters
    ----------
    input: torch.Tensor [shape=(batch_size, channels, samples)]
        Input audio clips of shape (batch_size, channels, samples)
    shift: float OR Fraction
        `float`: Amount to pitch-shift in # of bins. (1 bin == 1 semitone if `bins_per_octave` == 12)
        `Fraction`: A `fractions.Fraction` object indicating the shift ratio. Usually an element in `get_fast_shifts()`.
    sample_rate: int
        The sample rate of the input audio clips.
    bins_per_octave: int [optional]
        Number of bins per octave. Default is 12.
    n_fft: int [optional]
        Size of FFT. Default is `sample_rate // 64`.
    hop_length: int [optional]
        Size of hop length. Default is `n_fft // 32`.

    Returns
    -------
    output: torch.Tensor [shape=(batch_size, channels, samples)]
        The pitch-shifted batch of audio clips
    """
    # Derive FFT parameters when not supplied (0 is the "unset" marker).
    if not n_fft:
        n_fft = sample_rate // 64
    if not hop_length:
        hop_length = n_fft // 32
    batch_size, channels, samples = input.shape
    # Bin counts are converted to a ratio; Fractions are used as-is.
    if not isinstance(shift, Fraction):
        shift = 2.0 ** (float(shift) / bins_per_octave)
    resampler = T.Resample(sample_rate, int(sample_rate / shift)).to(input.device)
    output = input
    # Fold batch and channel dims together for the (2-D) STFT.
    output = output.reshape(batch_size * channels, samples)
    output = torch.stft(output, n_fft, hop_length, return_complex=True)[None, ...]
    stretcher = T.TimeStretch(
        fixed_rate=float(1 / shift), n_freq=output.shape[2], hop_length=hop_length
    ).to(input.device)
    # Stretch time by 1/shift, go back to the waveform, then resample so
    # the overall duration is (approximately) restored.
    output = stretcher(output)
    output = torch.istft(output[0], n_fft, hop_length)
    output = resampler(output)
    del resampler, stretcher
    # Truncate or zero-pad to exactly the original sample count.
    if output.shape[1] >= input.shape[2]:
        output = output[:, : (input.shape[2])]
    else:
        output = pad(output, pad=(0, input.shape[2] - output.shape[1], 0, 0))
    output = output.reshape(batch_size, channels, samples)
    return output
| 30.646707 | 120 | 0.623877 |
076895866604830f7d19970ebf55e12f087c728b | 47,608 | py | Python | lib/matplotlib/__init__.py | chebee7i/matplotlib | ebde05f5c1579c2ca28dab2a11ae5a674e4aac2f | [
"MIT",
"BSD-3-Clause"
] | null | null | null | lib/matplotlib/__init__.py | chebee7i/matplotlib | ebde05f5c1579c2ca28dab2a11ae5a674e4aac2f | [
"MIT",
"BSD-3-Clause"
] | null | null | null | lib/matplotlib/__init__.py | chebee7i/matplotlib | ebde05f5c1579c2ca28dab2a11ae5a674e4aac2f | [
"MIT",
"BSD-3-Clause"
] | null | null | null | """
This is an object-oriented plotting library.
A procedural interface is provided by the companion pyplot module,
which may be imported directly, e.g.::
import matplotlib.pyplot as plt
or using ipython::
ipython
at your terminal, followed by::
In [1]: %matplotlib
In [2]: import matplotlib.pyplot as plt
at the ipython shell prompt.
For the most part, direct use of the object-oriented library is
encouraged when programming; pyplot is primarily for working
interactively. The
exceptions are the pyplot commands :func:`~matplotlib.pyplot.figure`,
:func:`~matplotlib.pyplot.subplot`,
:func:`~matplotlib.pyplot.subplots`, and
:func:`~pyplot.savefig`, which can greatly simplify scripting.
Modules include:
:mod:`matplotlib.axes`
defines the :class:`~matplotlib.axes.Axes` class. Most pylab
commands are wrappers for :class:`~matplotlib.axes.Axes`
methods. The axes module is the highest level of OO access to
the library.
:mod:`matplotlib.figure`
defines the :class:`~matplotlib.figure.Figure` class.
:mod:`matplotlib.artist`
defines the :class:`~matplotlib.artist.Artist` base class for
all classes that draw things.
:mod:`matplotlib.lines`
defines the :class:`~matplotlib.lines.Line2D` class for
drawing lines and markers
:mod:`matplotlib.patches`
defines classes for drawing polygons
:mod:`matplotlib.text`
defines the :class:`~matplotlib.text.Text`,
:class:`~matplotlib.text.TextWithDash`, and
:class:`~matplotlib.text.Annotate` classes
:mod:`matplotlib.image`
defines the :class:`~matplotlib.image.AxesImage` and
:class:`~matplotlib.image.FigureImage` classes
:mod:`matplotlib.collections`
classes for efficient drawing of groups of lines or polygons
:mod:`matplotlib.colors`
classes for interpreting color specifications and for making
colormaps
:mod:`matplotlib.cm`
colormaps and the :class:`~matplotlib.image.ScalarMappable`
mixin class for providing color mapping functionality to other
classes
:mod:`matplotlib.ticker`
classes for calculating tick mark locations and for formatting
tick labels
:mod:`matplotlib.backends`
a subpackage with modules for various gui libraries and output
formats
The base matplotlib namespace includes:
:data:`~matplotlib.rcParams`
a global dictionary of default configuration settings. It is
initialized by code which may be overridded by a matplotlibrc
file.
:func:`~matplotlib.rc`
a function for setting groups of rcParams values
:func:`~matplotlib.use`
a function for setting the matplotlib backend. If used, this
function must be called immediately after importing matplotlib
for the first time. In particular, it must be called
**before** importing pylab (if pylab is imported).
matplotlib was initially written by John D. Hunter (1968-2012) and is now
developed and maintained by a host of others.
Occasionally the internal documentation (python docstrings) will refer
to MATLAB®, a registered trademark of The MathWorks, Inc.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import sys
import distutils.version
from itertools import chain
__version__ = str('1.5.x')
__version__numpy__ = str('1.6') # minimum required numpy version
try:
import dateutil
except ImportError:
raise ImportError("matplotlib requires dateutil")
def compare_versions(a, b):
    "return True if a is greater than or equal to b"
    # An empty/None version string never satisfies the requirement.
    if not a:
        return False
    # LooseVersion needs text on Python 3, so decode byte strings first.
    if six.PY3:
        a = a.decode('ascii') if isinstance(a, bytes) else a
        b = b.decode('ascii') if isinstance(b, bytes) else b
    return (distutils.version.LooseVersion(a) >=
            distutils.version.LooseVersion(b))
# matplotlib's py2/py3 compatibility layer needs six >= 1.3.
if not compare_versions(six.__version__, '1.3'):
    raise ImportError(
        'six 1.3 or later is required; you have %s' % (
            six.__version__))

try:
    import pyparsing
except ImportError:
    raise ImportError("matplotlib requires pyparsing")
else:
    if not compare_versions(pyparsing.__version__, '1.5.6'):
        raise ImportError(
            "matplotlib requires pyparsing >= 1.5.6")

    # pyparsing 2.0.0 bug, but it may be patched in distributions.
    # Probe whether Forward.__ilshift__ returns the Forward (it must for
    # `f <<= ...` to keep working).
    try:
        f = pyparsing.Forward()
        f <<= pyparsing.Literal('a')
        bad_pyparsing = f is None
    except TypeError:
        bad_pyparsing = True

    # pyparsing 1.5.6 does not have <<= on the Forward class, but
    # pyparsing 2.0.0 and later will spew deprecation warnings if
    # using << instead. Additionally, the <<= in pyparsing 1.5.7 is
    # broken, since it doesn't return self. In order to support
    # pyparsing 1.5.6 and above with a common code base, this small
    # monkey patch is applied.
    if bad_pyparsing:
        def _forward_ilshift(self, other):
            """Replacement Forward.__ilshift__ that returns self."""
            self.__lshift__(other)
            return self
        pyparsing.Forward.__ilshift__ = _forward_ilshift
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
import io
import locale
import os
import re
import tempfile
import warnings
import contextlib
import distutils.sysconfig
# cbook must import matplotlib only within function
# definitions, so it is safe to import from it here.
from matplotlib.cbook import is_string_like
from matplotlib.compat import subprocess
try:
reload
except NameError:
# Python 3
from imp import reload
if not hasattr(sys, 'argv'): # for modpython
sys.argv = [str('modpython')]
from matplotlib.rcsetup import (defaultParams,
validate_backend)
major, minor1, minor2, s, tmp = sys.version_info

# True on the supported interpreters: CPython >= 2.4 or any Python 3.
_python24 = (major == 2 and minor1 >= 4) or major >= 3

# the havedate check was a legacy from old matplotlib which preceeded
# datetime support
_havedate = True

if not _python24:
    raise ImportError('matplotlib requires Python 2.4 or later')

import numpy

# Enforce the minimum numpy version declared at the top of the module.
if not compare_versions(numpy.__version__, __version__numpy__):
    raise ImportError(
        'numpy %s or later is required; you have %s' % (
            __version__numpy__, numpy.__version__))
def _is_writable_dir(p):
"""
p is a string pointing to a putative writable dir -- return True p
is such a string, else False
"""
try:
p + '' # test is string like
except TypeError:
return False
# Test whether the operating system thinks it's a writable directory.
# Note that this check is necessary on Google App Engine, because the
# subsequent check will succeed even though p may not be writable.
if not os.access(p, os.W_OK) or not os.path.isdir(p):
return False
# Also test that it is actually possible to write to a file here.
try:
t = tempfile.TemporaryFile(dir=p)
try:
t.write(b'1')
finally:
t.close()
except OSError:
return False
return True
class Verbose(object):
    """
    A class to handle reporting. Set the fileo attribute to any file
    instance to handle the output. Default is sys.stdout
    """
    # Verbosity levels, ordered least to most chatty; vald maps a level
    # name to its rank for >= comparisons.
    levels = ('silent', 'helpful', 'debug', 'debug-annoying')
    vald = dict([(level, i) for i, level in enumerate(levels)])

    # parse the verbosity from the command line; flags look like
    # --verbose-silent or --verbose-helpful
    _commandLineVerbose = None

    # NOTE: this loop runs once, at class-creation time.
    for arg in sys.argv[1:]:
        # cast to str because we are using unicode_literals,
        # and argv is always str
        if not arg.startswith(str('--verbose-')):
            continue
        level_str = arg[10:]
        # If it doesn't match one of ours, then don't even
        # bother noting it, we are just a 3rd-party library
        # to somebody else's script.
        if level_str in levels:
            _commandLineVerbose = level_str

    def __init__(self):
        # Default: silent, writing to stdout.
        self.set_level('silent')
        self.fileo = sys.stdout

    def set_level(self, level):
        'set the verbosity to one of the Verbose.levels strings'
        # A --verbose-* command-line flag always overrides the argument.
        if self._commandLineVerbose is not None:
            level = self._commandLineVerbose
        if level not in self.levels:
            warnings.warn('matplotlib: unrecognized --verbose-* string "%s".'
                          ' Legal values are %s' % (level, self.levels))
        else:
            self.level = level

    def set_fileo(self, fname):
        """Direct output to *fname*: the symbolic names 'sys.stdout' /
        'sys.stderr', or a path opened for writing."""
        std = {
            'sys.stdout': sys.stdout,
            'sys.stderr': sys.stderr,
        }
        if fname in std:
            self.fileo = std[fname]
        else:
            try:
                fileo = open(fname, 'w')
            except IOError:
                raise ValueError('Verbose object could not open log file "{}"'
                                 ' for writing.\nCheck your matplotlibrc '
                                 'verbose.fileo setting'.format(fname))
            else:
                self.fileo = fileo

    def report(self, s, level='helpful'):
        """
        print message s to self.fileo if self.level>=level. Return
        value indicates whether a message was issued
        """
        if self.ge(level):
            print(s, file=self.fileo)
            return True
        return False

    def wrap(self, fmt, func, level='helpful', always=True):
        """
        return a callable function that wraps func and reports it
        output through the verbose handler if current verbosity level
        is higher than level

        if always is True, the report will occur on every function
        call; otherwise only on the first time the function is called
        """
        assert six.callable(func)

        def wrapper(*args, **kwargs):
            ret = func(*args, **kwargs)

            if (always or not wrapper._spoke):
                spoke = self.report(fmt % ret, level)
                if not wrapper._spoke:
                    # Remember whether a first report has been made, so
                    # always=False wrappers speak only once.
                    wrapper._spoke = spoke
            return ret
        wrapper._spoke = False
        wrapper.__doc__ = func.__doc__
        return wrapper

    def ge(self, level):
        'return true if self.level is >= level'
        return self.vald[self.level] >= self.vald[level]
verbose = Verbose()
def checkdep_dvipng():
    """Return the installed dvipng version string, or None if the tool
    is missing or its output cannot be parsed."""
    try:
        proc = subprocess.Popen(['dvipng', '-version'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, _stderr = proc.communicate()
        # The version is the last token of the second output line.
        second_line = stdout.decode('ascii').split('\n')[1]
        return second_line.split()[-1]
    except (IndexError, ValueError, OSError):
        return None
def checkdep_ghostscript():
    """Locate a ghostscript executable and return (exec_name, version);
    (None, None) when ghostscript cannot be found."""
    if sys.platform == 'win32':
        candidates = ['gswin32c', 'gswin64c', 'gs']
    else:
        candidates = ['gs']
    for candidate in candidates:
        try:
            proc = subprocess.Popen(
                [candidate, '--version'], stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            stdout, _stderr = proc.communicate()
            if proc.returncode == 0:
                # Strip the trailing newline from the version output.
                return candidate, stdout[:-1].decode('ascii')
        except (IndexError, ValueError, OSError):
            pass
    return None, None
def checkdep_tex():
    """Return the installed TeX version (e.g. '3.14159') or None."""
    try:
        proc = subprocess.Popen(['tex', '-version'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, _stderr = proc.communicate()
        first_line = stdout.decode('ascii').split('\n')[0]
        # TeX versions approximate pi: 3.1 followed by more digits.
        match = re.search('3\.1\d+', first_line)
        return match.group(0)
    except (IndexError, ValueError, AttributeError, OSError):
        return None
def checkdep_pdftops():
    """Return the pdftops version string or None if unavailable."""
    try:
        proc = subprocess.Popen(['pdftops', '-v'], stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        _stdout, stderr = proc.communicate()
        # pdftops reports its version on stderr.
        for line in stderr.decode('ascii').split('\n'):
            if 'version' in line:
                return line.split()[-1]
    except (IndexError, ValueError, UnboundLocalError, OSError):
        return None
def checkdep_inkscape():
    """Return the Inkscape version string or None if unavailable."""
    try:
        proc = subprocess.Popen(['inkscape', '-V'], stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, _stderr = proc.communicate()
        for line in stdout.decode('ascii').split('\n'):
            if 'Inkscape' in line:
                # Second token of the banner line is the version.
                return line.split()[1]
        return None
    except (IndexError, ValueError, UnboundLocalError, OSError):
        return None
def checkdep_xmllint():
    """Return the xmllint version string or None if unavailable."""
    try:
        proc = subprocess.Popen(['xmllint', '--version'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        _stdout, stderr = proc.communicate()
        # xmllint writes its version banner to stderr.
        for line in stderr.decode('ascii').split('\n'):
            if 'version' in line:
                return line.split()[-1]
        return None
    except (IndexError, ValueError, UnboundLocalError, OSError):
        return None
def checkdep_ps_distiller(s):
    """Validate the ps.usedistiller rc value *s*.

    Returns *s* unchanged when the required external tools are present
    (ghostscript always; pdftops additionally when s == 'xpdf');
    otherwise warns and returns False.
    """
    if not s:
        return False

    flag = True
    gs_req = '7.07'    # minimum usable ghostscript
    gs_sugg = '7.07'   # recommended ghostscript
    gs_exec, gs_v = checkdep_ghostscript()
    if compare_versions(gs_v, gs_sugg):
        pass
    elif compare_versions(gs_v, gs_req):
        # Usable but older than recommended: mention it on the verbose
        # channel only.
        verbose.report(('ghostscript-%s found. ghostscript-%s or later '
                        'is recommended to use the ps.usedistiller option.')
                       % (gs_v, gs_sugg))
    else:
        flag = False
        warnings.warn(('matplotlibrc ps.usedistiller option can not be used '
                       'unless ghostscript-%s or later is installed on your '
                       'system') % gs_req)

    if s == 'xpdf':
        pdftops_req = '3.0'
        pdftops_req_alt = '0.9'  # poppler version numbers, ugh
        pdftops_v = checkdep_pdftops()
        if compare_versions(pdftops_v, pdftops_req):
            pass
        elif (compare_versions(pdftops_v, pdftops_req_alt) and not
              compare_versions(pdftops_v, '1.0')):
            # Accept poppler's old scheme: 0.9 <= version < 1.0.
            pass
        else:
            flag = False
            warnings.warn(('matplotlibrc ps.usedistiller can not be set to '
                           'xpdf unless xpdf-%s or later is installed on '
                           'your system') % pdftops_req)

    if flag:
        return s
    else:
        return False
def checkdep_usetex(s):
    """Validate the text.usetex rc setting *s*.

    Returns True only when TeX, dvipng and ghostscript are all installed
    in sufficient versions; otherwise warns and returns False.
    """
    if not s:
        return False

    tex_req = '3.1415'
    gs_req = '7.07'
    gs_sugg = '7.07'
    dvipng_req = '1.5'
    flag = True

    tex_v = checkdep_tex()
    if compare_versions(tex_v, tex_req):
        pass
    else:
        flag = False
        warnings.warn(('matplotlibrc text.usetex option can not be used '
                       'unless TeX-%s or later is '
                       'installed on your system') % tex_req)

    dvipng_v = checkdep_dvipng()
    if compare_versions(dvipng_v, dvipng_req):
        pass
    else:
        flag = False
        warnings.warn('matplotlibrc text.usetex can not be used with *Agg '
                      'backend unless dvipng-1.5 or later is '
                      'installed on your system')

    gs_exec, gs_v = checkdep_ghostscript()
    if compare_versions(gs_v, gs_sugg):
        pass
    elif compare_versions(gs_v, gs_req):
        # Usable but older than recommended: verbose-channel note only.
        verbose.report(('ghostscript-%s found. ghostscript-%s or later is '
                        'recommended for use with the text.usetex '
                        'option.') % (gs_v, gs_sugg))
    else:
        flag = False
        warnings.warn(('matplotlibrc text.usetex can not be used '
                       'unless ghostscript-%s or later is '
                       'installed on your system') % gs_req)

    return flag
def _get_home():
    """Find user's home directory if possible.
    Otherwise, returns None.

    :see:
        http://mail.python.org/pipermail/python-list/2005-February/325395.html
    """
    try:
        if six.PY2 and sys.platform == 'win32':
            # py2 on Windows: expanduser on bytes, then decode with the
            # filesystem encoding so the result is unicode.
            path = os.path.expanduser(b"~").decode(sys.getfilesystemencoding())
        else:
            path = os.path.expanduser("~")
    except ImportError:
        # This happens on Google App Engine (pwd module is not present).
        pass
    else:
        if os.path.isdir(path):
            return path
    # Fall back to environment variables commonly pointing at a usable
    # per-user directory.
    for evar in ('HOME', 'USERPROFILE', 'TMP'):
        path = os.environ.get(evar)
        if path is not None and os.path.isdir(path):
            return path
    return None
def _create_tmp_config_dir():
"""
If the config directory can not be created, create a temporary
directory.
Returns None if a writable temporary directory could not be created.
"""
import getpass
import tempfile
try:
tempdir = tempfile.gettempdir()
except NotImplementedError:
# Some restricted platforms (such as Google App Engine) do not provide
# gettempdir.
return None
tempdir = os.path.join(tempdir, 'matplotlib-%s' % getpass.getuser())
os.environ['MPLCONFIGDIR'] = tempdir
return tempdir
get_home = verbose.wrap('$HOME=%s', _get_home, always=False)
def _get_xdg_config_dir():
"""
Returns the XDG configuration directory, according to the `XDG
base directory spec
<http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
"""
path = os.environ.get('XDG_CONFIG_HOME')
if path is None:
path = get_home()
if path is not None:
path = os.path.join(path, '.config')
return path
def _get_xdg_cache_dir():
"""
Returns the XDG cache directory, according to the `XDG
base directory spec
<http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_.
"""
path = os.environ.get('XDG_CACHE_HOME')
if path is None:
path = get_home()
if path is not None:
path = os.path.join(path, '.cache')
return path
def _get_config_or_cache_dir(xdg_base):
    """Resolve (creating if necessary) the config/cache directory rooted
    at *xdg_base*, honouring the MPLCONFIGDIR override; falls back to a
    temporary directory when nothing writable is found."""
    from matplotlib.cbook import mkdirs

    # 1. Explicit MPLCONFIGDIR override.
    configdir = os.environ.get('MPLCONFIGDIR')
    if configdir is not None:
        if not os.path.exists(configdir):
            # NOTE(review): mkdirs is already imported above; this inner
            # import is redundant but harmless.
            from matplotlib.cbook import mkdirs
            mkdirs(configdir)
        if not _is_writable_dir(configdir):
            return _create_tmp_config_dir()
        return configdir

    # 2. ~/.matplotlib, or the XDG location on Linux.
    p = None
    h = get_home()
    if h is not None:
        p = os.path.join(h, '.matplotlib')
    if (sys.platform.startswith('linux') and xdg_base):
        p = os.path.join(xdg_base, 'matplotlib')

    if p is not None:
        if os.path.exists(p):
            if _is_writable_dir(p):
                return p
        else:
            # Directory doesn't exist yet: try to create it.
            try:
                mkdirs(p)
            except OSError:
                pass
            else:
                return p

    # 3. Last resort: a temp directory.
    return _create_tmp_config_dir()
def _get_configdir():
    """
    Return the string representing the configuration directory.

    The directory is chosen as follows:

    1. If the MPLCONFIGDIR environment variable is supplied, choose that.

    2a. On Linux, if `$HOME/.matplotlib` exists, choose that, but warn that
        that is the old location.  Barring that, follow the XDG specification
        and look first in `$XDG_CONFIG_HOME`, if defined, or `$HOME/.config`.

    2b. On other platforms, choose `$HOME/.matplotlib`.

    3. If the chosen directory exists and is writable, use that as the
       configuration directory.
    4. If possible, create a temporary directory, and use it as the
       configuration directory.
    5. A writable directory could not be found or created; return None.
    """
    # Shares all resolution logic with the cache-dir lookup.
    return _get_config_or_cache_dir(_get_xdg_config_dir())
get_configdir = verbose.wrap('CONFIGDIR=%s', _get_configdir, always=False)
def _get_cachedir():
    """
    Return the location of the cache directory.

    The procedure used to find the directory is the same as for
    _get_config_dir, except using `$XDG_CACHE_HOME`/`~/.cache` instead.
    """
    # Shares all resolution logic with the config-dir lookup.
    return _get_config_or_cache_dir(_get_xdg_cache_dir())
get_cachedir = verbose.wrap('CACHEDIR=%s', _get_cachedir, always=False)
def _get_data_path():
    'get the path to matplotlib data'

    # 1. Explicit override via the MATPLOTLIBDATA environment variable.
    if 'MATPLOTLIBDATA' in os.environ:
        path = os.environ['MATPLOTLIBDATA']
        if not os.path.isdir(path):
            raise RuntimeError('Path in environment MATPLOTLIBDATA not a '
                               'directory')
        return path

    # 2. Usual case: mpl-data sits next to this module.
    path = os.sep.join([os.path.dirname(__file__), 'mpl-data'])
    if os.path.isdir(path):
        return path

    # setuptools' namespace_packages may highjack this init file
    # so need to try something known to be in matplotlib, not basemap
    import matplotlib.afm
    path = os.sep.join([os.path.dirname(matplotlib.afm.__file__), 'mpl-data'])
    if os.path.isdir(path):
        return path

    # py2exe zips pure python, so still need special check
    if getattr(sys, 'frozen', None):
        exe_path = os.path.dirname(sys.executable)
        path = os.path.join(exe_path, 'mpl-data')
        if os.path.isdir(path):
            return path

        # Try again assuming we need to step up one more directory
        path = os.path.join(os.path.split(exe_path)[0], 'mpl-data')
        if os.path.isdir(path):
            return path

        # Try again assuming sys.path[0] is a dir not a exe
        path = os.path.join(sys.path[0], 'mpl-data')
        if os.path.isdir(path):
            return path

    raise RuntimeError('Could not find the matplotlib data files')
def _get_data_path_cached():
    # Memoize the data path in defaultParams so the filesystem search in
    # _get_data_path() runs at most once per process.
    if defaultParams['datapath'][0] is None:
        defaultParams['datapath'][0] = _get_data_path()
    return defaultParams['datapath'][0]
get_data_path = verbose.wrap('matplotlib data path %s', _get_data_path_cached,
always=False)
def get_example_data(fname):
    """
    get_example_data is deprecated -- use matplotlib.cbook.get_sample_data
    instead
    """
    # Removed API: always raise, pointing callers at the replacement.
    message = ('get_example_data is deprecated -- use '
               'matplotlib.cbook.get_sample_data instead')
    raise NotImplementedError(message)
def get_py2exe_datafiles():
    """Collect matplotlib's data files as the (relative_dir, [files])
    items py2exe expects, rooted at 'mpl-data'."""
    datapath = get_data_path()
    _, tail = os.path.split(datapath)
    d = {}
    for root, _, files in os.walk(datapath):
        # Need to explicitly remove cocoa_agg files or py2exe complains
        # NOTE I dont know why, but do as previous version
        if 'Matplotlib.nib' in files:
            files.remove('Matplotlib.nib')
        files = [os.path.join(root, filename) for filename in files]
        # Rewrite the on-disk root so entries are relative to 'mpl-data'.
        root = root.replace(tail, 'mpl-data')
        root = root[root.index('mpl-data'):]
        d[root] = files
    return list(d.items())
def matplotlib_fname():
    """
    Get the location of the config file.

    The file location is determined in the following order

    - `$PWD/matplotlibrc`

    - environment variable `MATPLOTLIBRC`

    - `$MPLCONFIGDIR/matplotlib`

    - On Linux,

        - `$HOME/.matplotlib/matplotlibrc`, if it exists

        - or `$XDG_CONFIG_HOME/matplotlib/matplotlibrc` (if
          $XDG_CONFIG_HOME is defined)

        - or `$HOME/.config/matplotlib/matplotlibrc` (if
          $XDG_CONFIG_HOME is not defined)

    - On other platforms,

      - `$HOME/.matplotlib/matplotlibrc` if `$HOME` is defined.

    - Lastly, it looks in `$MATPLOTLIBDATA/matplotlibrc` for a
      system-defined copy.
    """
    # 1. matplotlibrc in the current working directory wins.
    if six.PY2:
        cwd = os.getcwdu()
    else:
        cwd = os.getcwd()
    fname = os.path.join(cwd, 'matplotlibrc')
    if os.path.exists(fname):
        return fname

    # 2. MATPLOTLIBRC environment variable (interpreted as a directory).
    if 'MATPLOTLIBRC' in os.environ:
        path = os.environ['MATPLOTLIBRC']
        if os.path.exists(path):
            fname = os.path.join(path, 'matplotlibrc')
            if os.path.exists(fname):
                return fname

    # 3. The per-user configuration directory.
    configdir = _get_configdir()
    if configdir is not None:
        fname = os.path.join(configdir, 'matplotlibrc')
        if os.path.exists(fname):
            # A legacy ~/.matplotlib config on Linux takes precedence,
            # with a deprecation warning pointing at the XDG location.
            home = get_home()
            if (sys.platform.startswith('linux') and
                home is not None and
                os.path.exists(os.path.join(
                    home, '.matplotlib', 'matplotlibrc'))):
                warnings.warn(
                    "Found matplotlib configuration in ~/.matplotlib/. "
                    "To conform with the XDG base directory standard, "
                    "this configuration location has been deprecated "
                    "on Linux, and the new location is now %s/matplotlib/. "
                    "Please move your configuration there to ensure that "
                    "matplotlib will continue to find it in the future." %
                    _get_xdg_config_dir())
                return os.path.join(
                    home, '.matplotlib', 'matplotlibrc')
            return fname

    # 4. Fall back to the bundled system copy.
    path = get_data_path()  # guaranteed to exist or raise
    fname = os.path.join(path, 'matplotlibrc')
    if not os.path.exists(fname):
        warnings.warn('Could not find matplotlibrc; using defaults')

    return fname
# Removed rc keys mapped to (replacement key, value converter); RcParams
# translates accesses to these transparently, with a warning.
_deprecated_map = {
    'text.fontstyle':   ('font.style', lambda x: x),
    'text.fontangle':   ('font.style', lambda x: x),
    'text.fontvariant': ('font.variant', lambda x: x),
    'text.fontweight':  ('font.weight', lambda x: x),
    'text.fontsize':    ('font.size', lambda x: x),
    'tick.size':        ('tick.major.size', lambda x: x),
    'svg.embed_char_paths': ('svg.fonttype',
                             lambda x: "path" if x else "none"),
    'savefig.extension': ('savefig.format', lambda x: x),
    }

# rc keys that are deprecated and silently ignored (currently none).
_deprecated_ignore_map = {
    }

# rc keys that are obsolete with no replacement at all.
_obsolete_set = set(['tk.pythoninspect', ])

# Union of everything above; used to exclude these keys from validation.
_all_deprecated = set(chain(_deprecated_ignore_map,
                            _deprecated_map, _obsolete_set))
class RcParams(dict):
    """
    A dictionary subclass that validates every value on the way in.

    The validating functions are defined and associated with rc parameters
    in :mod:`matplotlib.rcsetup`.  Deprecated keys are transparently
    forwarded (or ignored) per the module-level deprecation maps.
    """

    # key -> validator callable, built from defaultParams minus any
    # deprecated keys.
    validate = dict((key, converter) for key, (default, converter) in
                    six.iteritems(defaultParams)
                    if key not in _all_deprecated)
    msg_depr = "%s is deprecated and replaced with %s; please use the latter."
    msg_depr_ignore = "%s is deprecated and ignored. Use %s"

    # validate values on the way in
    def __init__(self, *args, **kwargs):
        # Route every initial item through __setitem__ so it is validated.
        for k, v in six.iteritems(dict(*args, **kwargs)):
            self[k] = v

    def __setitem__(self, key, val):
        """Validate *val* (converting deprecated keys first) then store it.

        Raises KeyError for unknown keys and ValueError when the validator
        rejects the value.
        """
        try:
            if key in _deprecated_map:
                # Forward the write to the replacement key/value.
                alt_key, alt_val = _deprecated_map[key]
                warnings.warn(self.msg_depr % (key, alt_key))
                key = alt_key
                val = alt_val(val)
            elif key in _deprecated_ignore_map:
                # Warn and drop the value entirely.
                alt = _deprecated_ignore_map[key]
                warnings.warn(self.msg_depr_ignore % (key, alt))
                return
            try:
                cval = self.validate[key](val)
            except ValueError as ve:
                raise ValueError("Key %s: %s" % (key, str(ve)))
            dict.__setitem__(self, key, cval)
        except KeyError:
            # Unknown key: self.validate[key] raised above.
            raise KeyError('%s is not a valid rc parameter.\
See rcParams.keys() for a list of valid parameters.' % (key,))

    def __getitem__(self, key):
        # Redirect reads of deprecated keys to their replacements, warning
        # each time.
        if key in _deprecated_map:
            alt_key, alt_val = _deprecated_map[key]
            warnings.warn(self.msg_depr % (key, alt_key))
            key = alt_key
        elif key in _deprecated_ignore_map:
            alt = _deprecated_ignore_map[key]
            warnings.warn(self.msg_depr_ignore % (key, alt))
            key = alt
        return dict.__getitem__(self, key)

    # http://stackoverflow.com/questions/2390827/how-to-properly-subclass-dict-and-override-get-set
    # the default dict `update` does not use __setitem__
    # so rcParams.update(...) (such as in seaborn) side-steps
    # all of the validation; over-ride update to force it
    # through __setitem__
    def update(self, *args, **kwargs):
        for k, v in six.iteritems(dict(*args, **kwargs)):
            self[k] = v

    def __repr__(self):
        import pprint
        class_name = self.__class__.__name__
        indent = len(class_name) + 1
        # Pretty-print the dict, indenting continuation lines to align
        # under the opening "RcParams(".
        repr_split = pprint.pformat(dict(self), indent=1,
                                    width=80 - indent).split('\n')
        repr_indented = ('\n' + ' ' * indent).join(repr_split)
        return '{0}({1})'.format(class_name, repr_indented)

    def __str__(self):
        # One "key: value" per line, sorted by key.
        return '\n'.join('{0}: {1}'.format(k, v)
                         for k, v in sorted(self.items()))

    def keys(self):
        """
        Return sorted list of keys.
        """
        k = list(dict.keys(self))
        k.sort()
        return k

    def values(self):
        """
        Return values in order of sorted keys.
        """
        return [self[k] for k in self.keys()]

    def find_all(self, pattern):
        """
        Return the subset of this RcParams dictionary whose keys match,
        using :func:`re.search`, the given ``pattern``.

        .. note::

            Changes to the returned dictionary are *not* propagated to
            the parent RcParams dictionary.
        """
        import re
        pattern_re = re.compile(pattern)
        return RcParams((key, value)
                        for key, value in self.items()
                        if pattern_re.search(key))
def rc_params(fail_on_error=False):
    """Build an :class:`matplotlib.RcParams` from the default matplotlib
    rc file located by :func:`matplotlib_fname`.

    When *fail_on_error* is True, unparseable values raise instead of
    being skipped with a warning.
    """
    rc_file_name = matplotlib_fname()
    if os.path.exists(rc_file_name):
        return rc_params_from_file(rc_file_name, fail_on_error)
    # Should not happen: the rc file shipped in mpl-data is always present.
    defaults = RcParams((key, default)
                        for key, (default, _) in six.iteritems(defaultParams)
                        if key not in _all_deprecated)
    warnings.warn('could not find rc file; returning defaults')
    return defaults
# Prefixes that mark a "remote" location rather than a local file path;
# match() anchors at the start of the string.
URL_REGEX = re.compile(r'http://|https://|ftp://|file://|file:\\')


def is_url(filename):
    """Return True if string is an http, ftp, or file URL path."""
    return bool(URL_REGEX.match(filename))
def _url_lines(f):
# Compatibility for urlopen in python 3, which yields bytes.
for line in f:
yield line.decode('utf8')
@contextlib.contextmanager
def _open_file_or_url(fname):
    """Context manager yielding an iterable of text lines from *fname*.

    *fname* may be a URL (per :func:`is_url`), fetched with ``urlopen``
    and decoded line-by-line as UTF-8, or a local path opened with the
    locale's preferred encoding.
    """
    if is_url(fname):
        f = urlopen(fname)
        try:
            yield _url_lines(f)
        finally:
            # Close even when the consumer raises while the yield is
            # active; previously close() was only reached on the success
            # path, leaking the connection on error.
            f.close()
    else:
        with io.open(fname, encoding=locale.getdefaultlocale()[1]) as f:
            yield f
# Format used in parse-error warnings: line number, raw line, file name.
_error_details_fmt = 'line #%d\n\t"%s"\n\tin file "%s"'


def _rc_params_in_file(fname, fail_on_error=False):
    """Return :class:`matplotlib.RcParams` from the contents of the given file.

    Unlike `rc_params_from_file`, the configuration class only contains the
    parameters specified in the file (i.e. default values are not filled in).
    """
    cnt = 0
    rc_temp = {}
    # First pass: collect "key: value" pairs, stripping '#' comments and
    # warning (but continuing) on malformed or duplicate lines.
    with _open_file_or_url(fname) as fd:
        for line in fd:
            cnt += 1
            strippedline = line.split('#', 1)[0].strip()
            if not strippedline:
                continue
            tup = strippedline.split(':', 1)
            if len(tup) != 2:
                error_details = _error_details_fmt % (cnt, line, fname)
                warnings.warn('Illegal %s' % error_details)
                continue
            key, val = tup
            key = key.strip()
            val = val.strip()
            if key in rc_temp:
                warnings.warn('Duplicate key in file "%s", line #%d' %
                              (fname, cnt))
            # Last duplicate wins; line/cnt kept for error reporting below.
            rc_temp[key] = (val, line, cnt)
    config = RcParams()
    # Apply the verbose.* keys first so subsequent conversion warnings can
    # honor the configured verbosity.
    for key in ('verbose.level', 'verbose.fileo'):
        if key in rc_temp:
            val, line, cnt = rc_temp.pop(key)
            if fail_on_error:
                config[key] = val  # try to convert to proper type or raise
            else:
                try:
                    config[key] = val  # try to convert to proper type or skip
                except Exception as msg:
                    error_details = _error_details_fmt % (cnt, line, fname)
                    warnings.warn('Bad val "%s" on %s\n\t%s' %
                                  (val, error_details, msg))
    # Second pass: validate/convert everything else via RcParams.__setitem__.
    for key, (val, line, cnt) in six.iteritems(rc_temp):
        if key in defaultParams:
            if fail_on_error:
                config[key] = val  # try to convert to proper type or raise
            else:
                try:
                    config[key] = val  # try to convert to proper type or skip
                except Exception as msg:
                    error_details = _error_details_fmt % (cnt, line, fname)
                    warnings.warn('Bad val "%s" on %s\n\t%s' %
                                  (val, error_details, msg))
        elif key in _deprecated_ignore_map:
            warnings.warn('%s is deprecated. Update your matplotlibrc to use '
                          '%s instead.' % (key, _deprecated_ignore_map[key]))
        else:
            # Unknown key: advise the user on stderr rather than raising,
            # so an outdated rc file does not break import.
            print("""
Bad key "%s" on line %d in
%s.
You probably need to get an updated matplotlibrc file from
http://matplotlib.sf.net/_static/matplotlibrc or from the matplotlib source
distribution""" % (key, cnt, fname), file=sys.stderr)
    return config
def rc_params_from_file(fname, fail_on_error=False, use_default_template=True):
    """Return :class:`matplotlib.RcParams` from the contents of the given file.

    Parameters
    ----------
    fname : str
        Name of file parsed for matplotlib settings.
    fail_on_error : bool
        If True, raise an error when the parser fails to convert a parameter.
    use_default_template : bool
        If True, initialize with default parameters before updating with those
        in the given file. If False, the configuration class only contains the
        parameters specified in the file. (Useful for updating dicts.)
    """
    config_from_file = _rc_params_in_file(fname, fail_on_error)
    if not use_default_template:
        return config_from_file
    # Start from the (non-deprecated) defaults, then overlay the file.
    iter_params = six.iteritems(defaultParams)
    config = RcParams([(key, default) for key, (default, _) in iter_params
                       if key not in _all_deprecated])
    config.update(config_from_file)
    # Side effect: configure the module-level Verbose reporter.
    verbose.set_level(config['verbose.level'])
    verbose.set_fileo(config['verbose.fileo'])
    if config['datapath'] is None:
        config['datapath'] = get_data_path()
    # A non-empty latex preamble is allowed but explicitly unsupported.
    if not config['text.latex.preamble'] == ['']:
        verbose.report("""
*****************************************************************
You have the following UNSUPPORTED LaTeX preamble customizations:
%s
Please do not ask for support with these customizations active.
*****************************************************************
""" % '\n'.join(config['text.latex.preamble']), 'helpful')
    verbose.report('loaded rc file %s' % fname)
    return config
# this is the instance used by the matplotlib classes
rcParams = rc_params()
if rcParams['examples.directory']:
    # paths that are intended to be relative to matplotlib_fname()
    # are allowed for the examples.directory parameter.
    # However, we will need to fully qualify the path because
    # Sphinx requires absolute paths.
    if not os.path.isabs(rcParams['examples.directory']):
        _basedir, _fname = os.path.split(matplotlib_fname())
        # Sometimes matplotlib_fname() can return relative paths.
        # Also, using realpath() guarantees that Sphinx will use
        # the same path that matplotlib sees (in case of weird symlinks).
        _basedir = os.path.realpath(_basedir)
        _fullpath = os.path.join(_basedir, rcParams['examples.directory'])
        rcParams['examples.directory'] = _fullpath
# Snapshot of the params as loaded from the rc file (for rc_file_defaults).
rcParamsOrig = rcParams.copy()
# Pristine built-in defaults (for rcdefaults), excluding deprecated keys.
rcParamsDefault = RcParams([(key, default) for key, (default, converter) in
                            six.iteritems(defaultParams)
                            if key not in _all_deprecated])
# Downgrade settings whose external dependencies are missing.
rcParams['ps.usedistiller'] = checkdep_ps_distiller(
    rcParams['ps.usedistiller'])
rcParams['text.usetex'] = checkdep_usetex(rcParams['text.usetex'])
if rcParams['axes.formatter.use_locale']:
    import locale
    locale.setlocale(locale.LC_ALL, '')
def rc(group, **kwargs):
    """
    Set the current rc params.

    *group* names the rc grouping: for ``lines.linewidth`` the group is
    ``lines``, for ``axes.facecolor`` it is ``axes``, and so on.  *group*
    may also be a list or tuple of group names, e.g., (*xtick*, *ytick*).
    Each keyword argument is applied as
    ``rcParams['<group>.<name>'] = value``, so::

        rc('lines', linewidth=2, color='r')

    is equivalent to::

        rcParams['lines.linewidth'] = 2
        rcParams['lines.color'] = 'r'

    For interactive use the following single-letter aliases are expanded:
    'lw' -> 'linewidth', 'ls' -> 'linestyle', 'c' -> 'color',
    'fc' -> 'facecolor', 'ec' -> 'edgecolor', 'mew' -> 'markeredgewidth',
    'aa' -> 'antialiased'.  Thus the call above can be abbreviated::

        rc('lines', lw=2, c='r')

    Since kwargs is an ordinary dictionary you can keep reusable parameter
    sets around::

        font = {'family': 'monospace', 'weight': 'bold', 'size': 'larger'}
        rc('font', **font)  # pass in the font dict as kwargs

    Use :func:`~matplotlib.pyplot.rcdefaults` to restore the default rc
    params after changes.
    """
    aliases = {
        'lw': 'linewidth',
        'ls': 'linestyle',
        'c': 'color',
        'fc': 'facecolor',
        'ec': 'edgecolor',
        'mew': 'markeredgewidth',
        'aa': 'antialiased',
    }
    groups = (group,) if is_string_like(group) else group
    for grp in groups:
        for attr, value in six.iteritems(kwargs):
            full_name = aliases.get(attr, attr)
            rc_key = '%s.%s' % (grp, full_name)
            try:
                rcParams[rc_key] = value
            except KeyError:
                raise KeyError(('Unrecognized key "%s" for group "%s" and '
                                'name "%s"') % (rc_key, grp, full_name))
def rcdefaults():
    """
    Restore the default rc params.  These are not the params loaded by
    the rc file, but mpl's internal defaults (``rcParamsDefault``).  See
    :func:`rc_file_defaults` for reloading the params from the rc file.
    """
    # clear() then update() rather than rebinding, so every existing
    # reference to the rcParams instance sees the restored values.
    rcParams.clear()
    rcParams.update(rcParamsDefault)
def rc_file(fname):
    """
    Update the global rc params from the rc file *fname* (missing keys
    keep their built-in defaults via rc_params_from_file).
    """
    rcParams.update(rc_params_from_file(fname))
class rc_context(object):
    """
    Return a context manager for managing rc settings.

    This allows one to do::

        with mpl.rc_context(fname='screen.rc'):
            plt.plot(x, a)
            with mpl.rc_context(fname='print.rc'):
                plt.plot(x, b)
            plt.plot(x, c)

    The 'a' vs 'x' and 'c' vs 'x' plots would have settings from
    'screen.rc', while the 'b' vs 'x' plot would have settings from
    'print.rc'.

    A dictionary can also be passed to the context manager::

        with mpl.rc_context(rc={'text.usetex': True}, fname='screen.rc'):
            plt.plot(x, a)

    The 'rc' dictionary takes precedence over the settings loaded from
    'fname'. Passing a dictionary only is also valid.

    .. note:: The new settings are applied in ``__init__`` (i.e. at
       construction time), not in ``__enter__``; merely instantiating
       this class mutates the global ``rcParams``.
    """
    def __init__(self, rc=None, fname=None):
        self.rcdict = rc
        self.fname = fname
        # Snapshot taken before any changes so __exit__ can restore it.
        self._rcparams = rcParams.copy()
        try:
            if self.fname:
                rc_file(self.fname)
            if self.rcdict:
                # rc dict takes precedence: applied after the file.
                rcParams.update(self.rcdict)
        except:
            # if anything goes wrong, revert rc parameters and re-raise
            # (bare except is deliberate so even BaseException reverts
            # the global state before propagating)
            rcParams.clear()
            rcParams.update(self._rcparams)
            raise

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        # Restore the pre-context snapshot unconditionally.
        rcParams.update(self._rcparams)
def rc_file_defaults():
    """
    Restore the default rc params from the original matplotlib rc that
    was loaded at import time (``rcParamsOrig``), as opposed to mpl's
    built-in defaults (see :func:`rcdefaults`).
    """
    rcParams.update(rcParamsOrig)
# Warning emitted when use() is called after a backend is already active.
_use_error_msg = """ This call to matplotlib.use() has no effect
because the backend has already been chosen;
matplotlib.use() must be called *before* pylab, matplotlib.pyplot,
or matplotlib.backends is imported for the first time.
"""


def use(arg, warn=True, force=False):
    """
    Set the matplotlib backend to one of the known backends.

    The argument is case-insensitive. *warn* specifies whether a
    warning should be issued if a backend has already been set up.
    *force* is an **experimental** flag that tells matplotlib to
    attempt to initialize a new backend by reloading the backend
    module.

    .. note::

        This function must be called *before* importing pyplot for
        the first time; or, if you are not using pyplot, it must be called
        before importing matplotlib.backends.  If warn is True, a warning
        is issued if you try and call this after pylab or pyplot have been
        loaded.  In certain black magic use cases, e.g.
        :func:`pyplot.switch_backend`, we are doing the reloading necessary to
        make the backend switch work (in some cases, e.g., pure image
        backends) so one can set warn=False to suppress the warnings.

    To find out which backend is currently set, see
    :func:`matplotlib.get_backend`.
    """
    # Lets determine the proper backend name first
    if arg.startswith('module://'):
        # module:// backends are resolved by import path; keep case.
        name = arg
    else:
        # Lowercase only non-module backend names (modules are case-sensitive)
        arg = arg.lower()
        name = validate_backend(arg)
    # Check if we've already set up a backend
    if 'matplotlib.backends' in sys.modules:
        # Warn only if called with a different name
        if (rcParams['backend'] != name) and warn:
            warnings.warn(_use_error_msg)
        # Unless we've been told to force it, just return
        if not force:
            return
        need_reload = True
    else:
        need_reload = False
    # Store the backend name
    rcParams['backend'] = name
    # If needed we reload here because a lot of setup code is triggered on
    # module import. See backends/__init__.py for more detail.
    if need_reload:
        reload(sys.modules['matplotlib.backends'])
def get_backend():
    """Return the name of the current backend (the 'backend' rc param)."""
    return rcParams['backend']
def interactive(b):
    """
    Set interactive mode to boolean b.

    If b is True, then draw after every plotting command, e.g., after xlabel
    """
    rcParams['interactive'] = b


def is_interactive():
    'Return true if plot mode is interactive'
    return rcParams['interactive']
def tk_window_focus():
    """Return true if focus maintenance under TkAgg on win32 is on.

    This currently works only for python.exe and IPython.exe.  Both IDLE
    and Pythonwin.exe fail badly when tk_window_focus is on."""
    # Only meaningful under the TkAgg backend; short-circuits to False
    # (without reading 'tk.window_focus') for any other backend.
    return rcParams['backend'] == 'TkAgg' and rcParams['tk.window_focus']
# Now allow command line to override the rc-configured backend.
# Allow command line access to the backend with -d (MATLAB compatible
# flag)
for s in sys.argv[1:]:
    # cast to str because we are using unicode_literals,
    # and argv is always str
    if s.startswith(str('-d')) and len(s) > 2:  # look for a -d flag
        try:
            use(s[2:])
        except (KeyError, ValueError):
            pass
        # we don't want to assume all -d flags are backends, e.g., -debug,
        # so invalid backend names are silently ignored here
# Test modules run by matplotlib.test() when no explicit selection is given.
default_test_modules = [
    'matplotlib.tests.test_agg',
    'matplotlib.tests.test_animation',
    'matplotlib.tests.test_arrow_patches',
    'matplotlib.tests.test_artist',
    'matplotlib.tests.test_axes',
    'matplotlib.tests.test_axes_grid1',
    'matplotlib.tests.test_backend_bases',
    'matplotlib.tests.test_backend_pdf',
    'matplotlib.tests.test_backend_pgf',
    'matplotlib.tests.test_backend_ps',
    'matplotlib.tests.test_backend_qt4',
    'matplotlib.tests.test_backend_svg',
    'matplotlib.tests.test_basic',
    'matplotlib.tests.test_bbox_tight',
    'matplotlib.tests.test_cbook',
    'matplotlib.tests.test_coding_standards',
    'matplotlib.tests.test_collections',
    'matplotlib.tests.test_colorbar',
    'matplotlib.tests.test_colors',
    'matplotlib.tests.test_compare_images',
    'matplotlib.tests.test_contour',
    'matplotlib.tests.test_dates',
    'matplotlib.tests.test_delaunay',
    'matplotlib.tests.test_figure',
    'matplotlib.tests.test_font_manager',
    'matplotlib.tests.test_gridspec',
    'matplotlib.tests.test_image',
    'matplotlib.tests.test_legend',
    'matplotlib.tests.test_lines',
    'matplotlib.tests.test_mathtext',
    'matplotlib.tests.test_mlab',
    'matplotlib.tests.test_patches',
    'matplotlib.tests.test_path',
    'matplotlib.tests.test_patheffects',
    'matplotlib.tests.test_pickle',
    'matplotlib.tests.test_png',
    'matplotlib.tests.test_quiver',
    'matplotlib.tests.test_rcparams',
    'matplotlib.tests.test_scale',
    'matplotlib.tests.test_simplification',
    'matplotlib.tests.test_spines',
    'matplotlib.tests.test_streamplot',
    'matplotlib.tests.test_style',
    'matplotlib.tests.test_subplots',
    'matplotlib.tests.test_table',
    'matplotlib.tests.test_text',
    'matplotlib.tests.test_ticker',
    'matplotlib.tests.test_tightlayout',
    'matplotlib.tests.test_transforms',
    'matplotlib.tests.test_triangulation',
    'mpl_toolkits.tests.test_mplot3d',
    'matplotlib.tests.test_widgets',
    ]
def test(verbosity=1):
    """run the matplotlib test suite under nose, forcing the 'agg'
    backend for the duration and restoring the previous backend after"""
    try:
        # faulthandler prints tracebacks on hard crashes; optional.
        import faulthandler
    except ImportError:
        pass
    else:
        faulthandler.enable()
    old_backend = rcParams['backend']
    try:
        use('agg')
        import nose
        import nose.plugins.builtin
        from .testing.noseclasses import KnownFailure
        from nose.plugins.manager import PluginManager
        from nose.plugins import multiprocess
        # store the old values before overriding
        plugins = []
        plugins.append(KnownFailure())
        plugins.extend([plugin() for plugin in nose.plugins.builtin.plugins])
        manager = PluginManager(plugins=plugins)
        config = nose.config.Config(verbosity=verbosity, plugins=manager)
        # Nose doesn't automatically instantiate all of the plugins in the
        # child processes, so we have to provide the multiprocess plugin with
        # a list.
        multiprocess._instantiate_plugins = [KnownFailure]
        success = nose.run(
            defaultTest=default_test_modules,
            config=config,
        )
    finally:
        if old_backend.lower() != 'agg':
            use(old_backend)
    # NOTE(review): only reached when nose.run() returned normally; any
    # exception above propagates after the backend is restored.
    return success
test.__test__ = False  # nose: this function is not a test
# Import-time diagnostics: record version/platform details through the
# Verbose reporter so verbose runs show the environment they loaded in.
verbose.report('matplotlib version %s' % __version__)
verbose.report('verbose.level %s' % verbose.level)
verbose.report('interactive is %s' % is_interactive())
verbose.report('platform is %s' % sys.platform)
verbose.report('loaded modules: %s' % six.iterkeys(sys.modules), 'debug')
| 32.080863 | 99 | 0.612733 |
c817c725b1d8605eeb9b7b3faf979c8a9e1060b8 | 67,987 | py | Python | test/unit/common/test_utils.py | ericwanghp/swift | 71a20d04ccac24b37b23fba87063e74e802c08fc | [
"Apache-2.0"
] | 1 | 2021-04-18T15:23:18.000Z | 2021-04-18T15:23:18.000Z | test/unit/common/test_utils.py | Triv90/SwiftUml | 98fa5adfe5664dbb4f328ba2a1789a63c7550eed | [
"Apache-2.0"
] | null | null | null | test/unit/common/test_utils.py | Triv90/SwiftUml | 98fa5adfe5664dbb4f328ba2a1789a63c7550eed | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests for swift.common.utils """
from __future__ import with_statement
from test.unit import temptree
import ctypes
import errno
import logging
import mimetools
import os
import random
import re
import socket
import sys
import time
import unittest
from threading import Thread
from Queue import Queue, Empty
from getpass import getuser
from shutil import rmtree
from StringIO import StringIO
from functools import partial
from tempfile import TemporaryFile, NamedTemporaryFile
from logging import handlers as logging_handlers
from eventlet import sleep
from mock import patch
from swift.common.exceptions import (Timeout, MessageTimeout,
ConnectionTimeout)
from swift.common import utils
from swift.common.swob import Response
class MockOs():
    """Stand-in for the ``os`` module used by daemonization tests.

    Selected function names can be configured per-instance to be no-ops
    (*pass_funcs*), to record that they were called (*called_funcs*), or
    to record the call and then raise ``OSError`` (*raise_funcs*).  Any
    name not overridden falls through to the real ``os`` module.
    """

    def __init__(self, pass_funcs=(), called_funcs=(), raise_funcs=()):
        # Defaults are tuples (not lists) so instances can never share or
        # mutate a common default argument.
        self.closed_fds = []    # fds passed to dup2() as targets
        self.called_funcs = {}  # name -> True for every recorded call
        for func in pass_funcs:
            setattr(self, func, self.pass_func)
        for func in called_funcs:
            setattr(self, func, partial(self.called_func, func))
        for func in raise_funcs:
            setattr(self, func, partial(self.raise_func, func))

    def pass_func(self, *args, **kwargs):
        """Accept any arguments and do nothing."""
        pass

    # Calls that are always harmless to swallow during daemonization.
    setgroups = chdir = setsid = setgid = setuid = umask = pass_func

    def called_func(self, name, *args, **kwargs):
        """Record that *name* was invoked."""
        self.called_funcs[name] = True

    def raise_func(self, name, *args, **kwargs):
        """Record that *name* was invoked, then fail with OSError."""
        self.called_funcs[name] = True
        raise OSError()

    def dup2(self, source, target):
        # The daemon code dup2's over stdio; remember which fds it replaced.
        self.closed_fds.append(target)

    def geteuid(self):
        '''Pretend we are running as root.'''
        return 0

    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails, i.e. for names
        # this mock does not override; delegate those to the real module.
        # (The previous version first tried ``object.__getattr__``, but
        # ``object`` has no ``__getattr__`` attribute, so that branch
        # always raised AttributeError and was dead code.)
        return getattr(os, name)
class MockUdpSocket():
    """Minimal UDP socket double that records every sendto() payload."""

    def __init__(self):
        # (data, target) tuples, in the order they were sent.
        self.sent = []

    def sendto(self, data, target):
        self.sent += [(data, target)]

    def close(self):
        # Nothing to release; present so callers can close unconditionally.
        pass
class MockSys():
    """Stand-in for the ``sys`` module exposing temp-file stdio streams.

    Real (but throwaway) file objects back stdin/stdout/stderr so code
    under test can use their fileno()s like genuine stdio descriptors.
    """

    def __init__(self):
        self.stdin = TemporaryFile('w')
        self.stdout = TemporaryFile('r')
        self.stderr = TemporaryFile('r')
        self.__stderr__ = self.stderr
        self.stdio_fds = [stream.fileno() for stream in
                          (self.stdin, self.stdout, self.stderr)]
def reset_loggers():
    """Detach any handlers get_logger() installed and clear its bookkeeping.

    get_logger() memoizes installed handlers on itself in the
    ``handler4logger`` and ``console_handler4logger`` dicts; tear both
    down so each test starts with pristine loggers.
    """
    for registry_name in ('handler4logger', 'console_handler4logger'):
        if not hasattr(utils.get_logger, registry_name):
            continue
        for logger, handler in getattr(utils.get_logger,
                                       registry_name).items():
            logger.thread_locals = (None, None)
            logger.removeHandler(handler)
        delattr(utils.get_logger, registry_name)
class TestUtils(unittest.TestCase):
""" Tests for swift.common.utils """
    def setUp(self):
        # Pin the hash-path suffix so hashing helpers behave
        # deterministically across test runs.
        utils.HASH_PATH_SUFFIX = 'endcap'
    def test_normalize_timestamp(self):
        """ Test swift.common.utils.normalize_timestamp """
        # Both str and float inputs canonicalize to a fixed-width
        # ten-digit.five-digit string.
        self.assertEquals(utils.normalize_timestamp('1253327593.48174'),
                          "1253327593.48174")
        self.assertEquals(utils.normalize_timestamp(1253327593.48174),
                          "1253327593.48174")
        self.assertEquals(utils.normalize_timestamp('1253327593.48'),
                          "1253327593.48000")
        self.assertEquals(utils.normalize_timestamp(1253327593.48),
                          "1253327593.48000")
        # Short epoch values are zero-padded on the left.
        self.assertEquals(utils.normalize_timestamp('253327593.48'),
                          "0253327593.48000")
        self.assertEquals(utils.normalize_timestamp(253327593.48),
                          "0253327593.48000")
        self.assertEquals(utils.normalize_timestamp('1253327593'),
                          "1253327593.00000")
        self.assertEquals(utils.normalize_timestamp(1253327593),
                          "1253327593.00000")
        # Non-numeric input must be rejected.
        self.assertRaises(ValueError, utils.normalize_timestamp, '')
        self.assertRaises(ValueError, utils.normalize_timestamp, 'abc')
    def test_backwards(self):
        """ Test swift.common.utils.backward """
        # The lines are designed so that the function would encounter
        # all of the boundary conditions and typical conditions.
        # Block boundaries are marked with '<>' characters
        blocksize = 25
        lines = ['123456789x12345678><123456789\n',  # block larger than rest
                 '123456789x123>\n',  # block ends just before \n character
                 '123423456789\n',
                 '123456789x\n',  # block ends at the end of line
                 '<123456789x123456789x123\n',
                 '<6789x123\n',  # block ends at the beginning of the line
                 '6789x1234\n',
                 '1234><234\n',  # block ends typically in the middle of line
                 '123456789x123456789\n']
        with TemporaryFile('r+w') as f:
            for line in lines:
                f.write(line)
            # backward() must yield the lines newest-first, without the
            # trailing newline.
            count = len(lines) - 1
            for line in utils.backward(f, blocksize):
                self.assertEquals(line, lines[count].split('\n')[0])
                count -= 1
        # Empty file case
        with TemporaryFile('r') as f:
            self.assertEquals([], list(utils.backward(f)))
    def test_mkdirs(self):
        # mkdirs() must be idempotent, create intermediate directories,
        # and raise OSError when a path component is a regular file.
        testroot = os.path.join(os.path.dirname(__file__), 'mkdirs')
        try:
            os.unlink(testroot)
        except Exception:
            pass
        rmtree(testroot, ignore_errors=1)
        self.assert_(not os.path.exists(testroot))
        utils.mkdirs(testroot)
        self.assert_(os.path.exists(testroot))
        utils.mkdirs(testroot)  # second call is a no-op, not an error
        self.assert_(os.path.exists(testroot))
        rmtree(testroot, ignore_errors=1)
        # Nested path: all intermediate directories get created.
        testdir = os.path.join(testroot, 'one/two/three')
        self.assert_(not os.path.exists(testdir))
        utils.mkdirs(testdir)
        self.assert_(os.path.exists(testdir))
        utils.mkdirs(testdir)
        self.assert_(os.path.exists(testdir))
        rmtree(testroot, ignore_errors=1)
        # A regular file in the way must surface as OSError.
        open(testroot, 'wb').close()
        self.assert_(not os.path.exists(testdir))
        self.assertRaises(OSError, utils.mkdirs, testdir)
        os.unlink(testroot)
    def test_split_path(self):
        """ Test swift.common.utils.split_path """
        # Too few / malformed segments raise ValueError.
        self.assertRaises(ValueError, utils.split_path, '')
        self.assertRaises(ValueError, utils.split_path, '/')
        self.assertRaises(ValueError, utils.split_path, '//')
        self.assertEquals(utils.split_path('/a'), ['a'])
        self.assertRaises(ValueError, utils.split_path, '//a')
        self.assertEquals(utils.split_path('/a/'), ['a'])
        self.assertRaises(ValueError, utils.split_path, '/a/c')
        self.assertRaises(ValueError, utils.split_path, '//c')
        self.assertRaises(ValueError, utils.split_path, '/a/c/')
        self.assertRaises(ValueError, utils.split_path, '/a//')
        self.assertRaises(ValueError, utils.split_path, '/a', 2)
        self.assertRaises(ValueError, utils.split_path, '/a', 2, 3)
        self.assertRaises(ValueError, utils.split_path, '/a', 2, 3, True)
        self.assertEquals(utils.split_path('/a/c', 2), ['a', 'c'])
        self.assertEquals(utils.split_path('/a/c/o', 3), ['a', 'c', 'o'])
        self.assertRaises(ValueError, utils.split_path, '/a/c/o/r', 3, 3)
        # rest_with_last=True leaves trailing segments joined together.
        self.assertEquals(utils.split_path('/a/c/o/r', 3, 3, True),
                          ['a', 'c', 'o/r'])
        self.assertEquals(utils.split_path('/a/c', 2, 3, True),
                          ['a', 'c', None])
        self.assertRaises(ValueError, utils.split_path, '/a', 5, 4)
        self.assertEquals(utils.split_path('/a/c/', 2), ['a', 'c'])
        self.assertEquals(utils.split_path('/a/c/', 2, 3), ['a', 'c', ''])
        # Error messages must quote the offending path.
        try:
            utils.split_path('o\nn e', 2)
        except ValueError, err:
            self.assertEquals(str(err), 'Invalid path: o%0An%20e')
        try:
            utils.split_path('o\nn e', 2, 3, True)
        except ValueError, err:
            self.assertEquals(str(err), 'Invalid path: o%0An%20e')
    def test_validate_device_partition(self):
        """ Test swift.common.utils.validate_device_partition """
        utils.validate_device_partition('foo', 'bar')
        # Empty, slash-containing, and '.'/'..' components are rejected.
        self.assertRaises(ValueError,
                          utils.validate_device_partition, '', '')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, '', 'foo')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, 'foo', '')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, 'foo/bar', 'foo')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, 'foo', 'foo/bar')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, '.', 'foo')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, '..', 'foo')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, 'foo', '.')
        self.assertRaises(ValueError,
                          utils.validate_device_partition, 'foo', '..')
        # Error messages must quote the offending component.
        try:
            utils.validate_device_partition('o\nn e', 'foo')
        except ValueError, err:
            self.assertEquals(str(err), 'Invalid device: o%0An%20e')
        try:
            utils.validate_device_partition('foo', 'o\nn e')
        except ValueError, err:
            self.assertEquals(str(err), 'Invalid partition: o%0An%20e')
    def test_NullLogger(self):
        """ Test swift.common.utils.NullLogger """
        # NullLogger.write() must swallow everything without raising; the
        # StringIO is never attached to it, so it stays empty.
        sio = StringIO()
        nl = utils.NullLogger()
        nl.write('test')
        self.assertEquals(sio.getvalue(), '')
    def test_LoggerFileObject(self):
        # LoggerFileObject wraps a logger as a file-like object; anything
        # print()ed to it is logged with an 'STDOUT: ' prefix, and the
        # read-side file API must refuse to work.
        orig_stdout = sys.stdout
        orig_stderr = sys.stderr
        sio = StringIO()
        handler = logging.StreamHandler(sio)
        logger = logging.getLogger()
        logger.addHandler(handler)
        lfo = utils.LoggerFileObject(logger)
        print 'test1'
        self.assertEquals(sio.getvalue(), '')
        sys.stdout = lfo
        print 'test2'
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\n')
        sys.stderr = lfo
        print >> sys.stderr, 'test4'
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n')
        sys.stdout = orig_stdout
        print 'test5'
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n')
        print >> sys.stderr, 'test6'
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n'
                          'STDOUT: test6\n')
        sys.stderr = orig_stderr
        print 'test8'
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n'
                          'STDOUT: test6\n')
        # Newlines embedded via writelines are escaped as #012.
        lfo.writelines(['a', 'b', 'c'])
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n'
                          'STDOUT: test6\nSTDOUT: a#012b#012c\n')
        # close() and flush() are no-ops; writes still go through.
        lfo.close()
        lfo.write('d')
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n'
                          'STDOUT: test6\nSTDOUT: a#012b#012c\nSTDOUT: d\n')
        lfo.flush()
        self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDOUT: test4\n'
                          'STDOUT: test6\nSTDOUT: a#012b#012c\nSTDOUT: d\n')
        # Iteration and reads must fail: this is a write-only object.
        got_exc = False
        try:
            for line in lfo:
                pass
        except Exception:
            got_exc = True
        self.assert_(got_exc)
        got_exc = False
        try:
            for line in lfo.xreadlines():
                pass
        except Exception:
            got_exc = True
        self.assert_(got_exc)
        self.assertRaises(IOError, lfo.read)
        self.assertRaises(IOError, lfo.read, 1024)
        self.assertRaises(IOError, lfo.readline)
        self.assertRaises(IOError, lfo.readline, 1024)
        lfo.tell()
    def test_parse_options(self):
        # use mkstemp to get a file that is definitely on disk
        with NamedTemporaryFile() as f:
            conf_file = f.name
            conf, options = utils.parse_options(test_args=[conf_file])
            self.assertEquals(conf, conf_file)
            # assert defaults
            self.assertEquals(options['verbose'], False)
            self.assert_('once' not in options)
            # assert verbose as option
            conf, options = utils.parse_options(test_args=[conf_file, '-v'])
            self.assertEquals(options['verbose'], True)
            # check once option (only present when once=True is requested)
            conf, options = utils.parse_options(test_args=[conf_file],
                                                once=True)
            self.assertEquals(options['once'], False)
            test_args = [conf_file, '--once']
            conf, options = utils.parse_options(test_args=test_args, once=True)
            self.assertEquals(options['once'], True)
            # check options as arg parsing: bare words after the conf file
            # toggle flags, unknown words land in 'extra_args'
            test_args = [conf_file, 'once', 'plugin_name', 'verbose']
            conf, options = utils.parse_options(test_args=test_args, once=True)
            self.assertEquals(options['verbose'], True)
            self.assertEquals(options['once'], True)
            self.assertEquals(options['extra_args'], ['plugin_name'])
    def test_parse_options_errors(self):
        # parse_options must exit (not raise) on a missing or nonexistent
        # conf file, printing the reason to its stdout.
        orig_stdout = sys.stdout
        orig_stderr = sys.stderr
        stdo = StringIO()
        stde = StringIO()
        utils.sys.stdout = stdo
        utils.sys.stderr = stde
        self.assertRaises(SystemExit, utils.parse_options, once=True,
                          test_args=[])
        self.assert_('missing config file' in stdo.getvalue())
        # verify conf file must exist, context manager will delete temp file
        with NamedTemporaryFile() as f:
            conf_file = f.name
        self.assertRaises(SystemExit, utils.parse_options, once=True,
                          test_args=[conf_file])
        self.assert_('unable to locate' in stdo.getvalue())
        # reset stdio
        utils.sys.stdout = orig_stdout
        utils.sys.stderr = orig_stderr
    def test_get_logger(self):
        # Default log level is WARNING; 'log_level' conf raises/lowers it,
        # and the custom 'notice' level always logs.
        sio = StringIO()
        logger = logging.getLogger('server')
        logger.addHandler(logging.StreamHandler(sio))
        logger = utils.get_logger(None, 'server', log_route='server')
        logger.warn('test1')
        self.assertEquals(sio.getvalue(), 'test1\n')
        logger.debug('test2')
        self.assertEquals(sio.getvalue(), 'test1\n')
        logger = utils.get_logger({'log_level': 'DEBUG'}, 'server',
                                  log_route='server')
        logger.debug('test3')
        self.assertEquals(sio.getvalue(), 'test1\ntest3\n')
        # Doesn't really test that the log facility is truly being used all the
        # way to syslog; but exercises the code.
        logger = utils.get_logger({'log_facility': 'LOG_LOCAL3'}, 'server',
                                  log_route='server')
        logger.warn('test4')
        self.assertEquals(sio.getvalue(),
                          'test1\ntest3\ntest4\n')
        # make sure debug doesn't log by default
        logger.debug('test5')
        self.assertEquals(sio.getvalue(),
                          'test1\ntest3\ntest4\n')
        # make sure notice lvl logs by default
        logger.notice('test6')
        self.assertEquals(sio.getvalue(),
                          'test1\ntest3\ntest4\ntest6\n')
    def test_get_logger_sysloghandler_plumbing(self):
        # Intercept SysLogHandler construction to verify which address and
        # facility get_logger passes for each conf combination.
        orig_sysloghandler = utils.SysLogHandler
        syslog_handler_args = []

        def syslog_handler_catcher(*args, **kwargs):
            syslog_handler_args.append((args, kwargs))
            return orig_sysloghandler(*args, **kwargs)

        # get_logger reads these class attributes off SysLogHandler.
        syslog_handler_catcher.LOG_LOCAL0 = orig_sysloghandler.LOG_LOCAL0
        syslog_handler_catcher.LOG_LOCAL3 = orig_sysloghandler.LOG_LOCAL3
        try:
            utils.SysLogHandler = syslog_handler_catcher
            logger = utils.get_logger({
                'log_facility': 'LOG_LOCAL3',
            }, 'server', log_route='server')
            self.assertEquals([
                ((), {'address': '/dev/log',
                      'facility': orig_sysloghandler.LOG_LOCAL3})],
                syslog_handler_args)
            syslog_handler_args = []
            logger = utils.get_logger({
                'log_facility': 'LOG_LOCAL3',
                'log_address': '/foo/bar',
            }, 'server', log_route='server')
            self.assertEquals([
                ((), {'address': '/foo/bar',
                      'facility': orig_sysloghandler.LOG_LOCAL3}),
                # Second call is because /foo/bar didn't exist (and wasn't a
                # UNIX domain socket).
                ((), {'facility': orig_sysloghandler.LOG_LOCAL3})],
                syslog_handler_args)
            # Using UDP with default port
            syslog_handler_args = []
            logger = utils.get_logger({
                'log_udp_host': 'syslog.funtimes.com',
            }, 'server', log_route='server')
            self.assertEquals([
                ((), {'address': ('syslog.funtimes.com',
                                  logging.handlers.SYSLOG_UDP_PORT),
                      'facility': orig_sysloghandler.LOG_LOCAL0})],
                syslog_handler_args)
            # Using UDP with non-default port
            syslog_handler_args = []
            logger = utils.get_logger({
                'log_udp_host': 'syslog.funtimes.com',
                'log_udp_port': '2123',
            }, 'server', log_route='server')
            self.assertEquals([
                ((), {'address': ('syslog.funtimes.com', 2123),
                      'facility': orig_sysloghandler.LOG_LOCAL0})],
                syslog_handler_args)
        finally:
            utils.SysLogHandler = orig_sysloghandler
    def test_clean_logger_exception(self):
        """logger.exception() emits terse one-line messages for the
        recognized errno / timeout cases asserted below, and a full
        traceback for anything else."""
        # setup stream logging
        sio = StringIO()
        logger = utils.get_logger(None)
        handler = logging.StreamHandler(sio)
        logger.logger.addHandler(handler)
        def strip_value(sio):
            # return the accumulated log text and reset the buffer
            v = sio.getvalue()
            sio.truncate(0)
            return v
        def log_exception(exc):
            # raise and log the exception the way application code would
            try:
                raise exc
            except (Exception, Timeout):
                logger.exception('blah')
        try:
            # establish base case
            self.assertEquals(strip_value(sio), '')
            logger.info('test')
            self.assertEquals(strip_value(sio), 'test\n')
            self.assertEquals(strip_value(sio), '')
            logger.info('test')
            logger.info('test')
            self.assertEquals(strip_value(sio), 'test\ntest\n')
            self.assertEquals(strip_value(sio), '')
            # test OSError: EIO/ENOSPC are condensed, no traceback
            for en in (errno.EIO, errno.ENOSPC):
                log_exception(OSError(en, 'my %s error message' % en))
                log_msg = strip_value(sio)
                self.assert_('Traceback' not in log_msg)
                self.assert_('my %s error message' % en in log_msg)
            # unfiltered
            log_exception(OSError())
            self.assert_('Traceback' in strip_value(sio))
            # test socket.error: well-known errnos become short phrases
            log_exception(socket.error(errno.ECONNREFUSED,
                                       'my error message'))
            log_msg = strip_value(sio)
            self.assert_('Traceback' not in log_msg)
            self.assert_('errno.ECONNREFUSED message test' not in log_msg)
            self.assert_('Connection refused' in log_msg)
            log_exception(socket.error(errno.EHOSTUNREACH,
                                       'my error message'))
            log_msg = strip_value(sio)
            self.assert_('Traceback' not in log_msg)
            self.assert_('my error message' not in log_msg)
            self.assert_('Host unreachable' in log_msg)
            log_exception(socket.error(errno.ETIMEDOUT, 'my error message'))
            log_msg = strip_value(sio)
            self.assert_('Traceback' not in log_msg)
            self.assert_('my error message' not in log_msg)
            self.assert_('Connection timeout' in log_msg)
            # unfiltered
            log_exception(socket.error(0, 'my error message'))
            log_msg = strip_value(sio)
            self.assert_('Traceback' in log_msg)
            self.assert_('my error message' in log_msg)
            # test eventlet.Timeout: class name and seconds, no traceback
            log_exception(ConnectionTimeout(42, 'my error message'))
            log_msg = strip_value(sio)
            self.assert_('Traceback' not in log_msg)
            self.assert_('ConnectionTimeout' in log_msg)
            self.assert_('(42s)' in log_msg)
            self.assert_('my error message' not in log_msg)
            log_exception(MessageTimeout(42, 'my error message'))
            log_msg = strip_value(sio)
            self.assert_('Traceback' not in log_msg)
            self.assert_('MessageTimeout' in log_msg)
            self.assert_('(42s)' in log_msg)
            self.assert_('my error message' in log_msg)
            # test unhandled
            log_exception(Exception('my error message'))
            log_msg = strip_value(sio)
            self.assert_('Traceback' in log_msg)
            self.assert_('my error message' in log_msg)
        finally:
            logger.logger.removeHandler(handler)
            reset_loggers()
    def test_swift_log_formatter(self):
        """SwiftLogFormatter appends txn_id / client_ip to error and warn
        records (but not info, and not when the value already appears in
        the message) and collapses newlines to #012."""
        # setup stream logging
        sio = StringIO()
        logger = utils.get_logger(None)
        handler = logging.StreamHandler(sio)
        handler.setFormatter(utils.SwiftLogFormatter())
        logger.logger.addHandler(handler)
        def strip_value(sio):
            # return the accumulated log text and reset the buffer
            v = sio.getvalue()
            sio.truncate(0)
            return v
        try:
            self.assertFalse(logger.txn_id)
            logger.error('my error message')
            log_msg = strip_value(sio)
            self.assert_('my error message' in log_msg)
            self.assert_('txn' not in log_msg)
            logger.txn_id = '12345'
            logger.error('test')
            log_msg = strip_value(sio)
            self.assert_('txn' in log_msg)
            self.assert_('12345' in log_msg)
            # test no txn on info message
            self.assertEquals(logger.txn_id, '12345')
            logger.info('test')
            log_msg = strip_value(sio)
            self.assert_('txn' not in log_msg)
            self.assert_('12345' not in log_msg)
            # test txn already in message
            self.assertEquals(logger.txn_id, '12345')
            logger.warn('test 12345 test')
            self.assertEquals(strip_value(sio), 'test 12345 test\n')
            # Test multi line collapsing
            logger.error('my\nerror\nmessage')
            log_msg = strip_value(sio)
            self.assert_('my#012error#012message' in log_msg)
            # test client_ip
            self.assertFalse(logger.client_ip)
            logger.error('my error message')
            log_msg = strip_value(sio)
            self.assert_('my error message' in log_msg)
            self.assert_('client_ip' not in log_msg)
            logger.client_ip = '1.2.3.4'
            logger.error('test')
            log_msg = strip_value(sio)
            self.assert_('client_ip' in log_msg)
            self.assert_('1.2.3.4' in log_msg)
            # test no client_ip on info message
            self.assertEquals(logger.client_ip, '1.2.3.4')
            logger.info('test')
            log_msg = strip_value(sio)
            self.assert_('client_ip' not in log_msg)
            self.assert_('1.2.3.4' not in log_msg)
            # test client_ip (and txn) already in message
            self.assertEquals(logger.client_ip, '1.2.3.4')
            logger.warn('test 1.2.3.4 test 12345')
            self.assertEquals(strip_value(sio), 'test 1.2.3.4 test 12345\n')
        finally:
            logger.logger.removeHandler(handler)
            reset_loggers()
def test_storage_directory(self):
self.assertEquals(utils.storage_directory('objects', '1', 'ABCDEF'),
'objects/1/DEF/ABCDEF')
def test_whataremyips(self):
myips = utils.whataremyips()
self.assert_(len(myips) > 1)
self.assert_('127.0.0.1' in myips)
def test_hash_path(self):
# Yes, these tests are deliberately very fragile. We want to make sure
# that if someones changes the results hash_path produces, they know it
self.assertEquals(utils.hash_path('a'),
'1c84525acb02107ea475dcd3d09c2c58')
self.assertEquals(utils.hash_path('a', 'c'),
'33379ecb053aa5c9e356c68997cbb59e')
self.assertEquals(utils.hash_path('a', 'c', 'o'),
'06fbf0b514e5199dfc4e00f42eb5ea83')
self.assertEquals(utils.hash_path('a', 'c', 'o', raw_digest=False),
'06fbf0b514e5199dfc4e00f42eb5ea83')
self.assertEquals(utils.hash_path('a', 'c', 'o', raw_digest=True),
'\x06\xfb\xf0\xb5\x14\xe5\x19\x9d\xfcN'
'\x00\xf4.\xb5\xea\x83')
self.assertRaises(ValueError, utils.hash_path, 'a', object='o')
def test_load_libc_function(self):
self.assert_(callable(
utils.load_libc_function('printf')))
self.assert_(callable(
utils.load_libc_function('some_not_real_function')))
    def test_readconf(self):
        """readconf parses a conf file (given as a path or a file-like
        object) into nested dicts, optionally scoped to one section, with
        defaults and a log_name override; an unknown section or missing
        file raises SystemExit."""
        conf = '''[section1]
foo = bar
[section2]
log_name = yarr'''
        # setup a real file
        with open('/tmp/test', 'wb') as f:
            f.write(conf)
        make_filename = lambda: '/tmp/test'
        # setup a file stream
        make_fp = lambda: StringIO(conf)
        for conf_object_maker in (make_filename, make_fp):
            conffile = conf_object_maker()
            result = utils.readconf(conffile)
            expected = {'__file__': conffile,
                        'log_name': None,
                        'section1': {'foo': 'bar'},
                        'section2': {'log_name': 'yarr'}}
            self.assertEquals(result, expected)
            # scoped to one section: log_name defaults to the section name
            conffile = conf_object_maker()
            result = utils.readconf(conffile, 'section1')
            expected = {'__file__': conffile, 'log_name': 'section1',
                        'foo': 'bar'}
            self.assertEquals(result, expected)
            conffile = conf_object_maker()
            result = utils.readconf(conffile,
                                    'section2').get('log_name')
            expected = 'yarr'
            self.assertEquals(result, expected)
            # explicit log_name keyword wins over the section name
            conffile = conf_object_maker()
            result = utils.readconf(conffile, 'section1',
                                    log_name='foo').get('log_name')
            expected = 'foo'
            self.assertEquals(result, expected)
            # defaults are merged into the section values
            conffile = conf_object_maker()
            result = utils.readconf(conffile, 'section1',
                                    defaults={'bar': 'baz'})
            expected = {'__file__': conffile, 'log_name': 'section1',
                        'foo': 'bar', 'bar': 'baz'}
            self.assertEquals(result, expected)
        self.assertRaises(SystemExit, utils.readconf, '/tmp/test', 'section3')
        os.unlink('/tmp/test')
        self.assertRaises(SystemExit, utils.readconf, '/tmp/test')
    def test_readconf_raw(self):
        """With raw=True, %(...)s interpolation tokens in values are left
        untouched instead of being expanded."""
        conf = '''[section1]
foo = bar
[section2]
log_name = %(yarr)s'''
        # setup a real file
        with open('/tmp/test', 'wb') as f:
            f.write(conf)
        make_filename = lambda: '/tmp/test'
        # setup a file stream
        make_fp = lambda: StringIO(conf)
        for conf_object_maker in (make_filename, make_fp):
            conffile = conf_object_maker()
            result = utils.readconf(conffile, raw=True)
            expected = {'__file__': conffile,
                        'log_name': None,
                        'section1': {'foo': 'bar'},
                        'section2': {'log_name': '%(yarr)s'}}
            self.assertEquals(result, expected)
        os.unlink('/tmp/test')
        # a missing conf file still raises SystemExit
        self.assertRaises(SystemExit, utils.readconf, '/tmp/test')
    def test_drop_privileges(self):
        """drop_privileges calls the expected os functions and sets $HOME;
        an OSError from setsid (e.g. already session leader) is tolerated
        and the rest of the calls still happen."""
        user = getuser()
        # over-ride os with mock
        required_func_calls = ('setgroups', 'setgid', 'setuid', 'setsid',
                               'chdir', 'umask')
        utils.os = MockOs(called_funcs=required_func_calls)
        # exercise the code
        utils.drop_privileges(user)
        for func in required_func_calls:
            self.assert_(utils.os.called_funcs[func])
        import pwd
        # $HOME must be set to the user's passwd home directory
        self.assertEquals(pwd.getpwnam(user)[5], utils.os.environ['HOME'])
        # reset; test same args, OSError trying to get session leader
        utils.os = MockOs(called_funcs=required_func_calls,
                          raise_funcs=('setsid',))
        for func in required_func_calls:
            self.assertFalse(utils.os.called_funcs.get(func, False))
        utils.drop_privileges(user)
        for func in required_func_calls:
            self.assert_(utils.os.called_funcs[func])
    def test_capture_stdio(self):
        """capture_stdio installs an excepthook, closes/redirects the
        stdio fds, and wraps stdout/stderr in LoggerFileObjects unless
        capture_stdout/capture_stderr are False."""
        # stubs
        logger = utils.get_logger(None, 'dummy')
        # mock utils system modules
        _orig_sys = utils.sys
        _orig_os = utils.os
        try:
            utils.sys = MockSys()
            utils.os = MockOs()
            # basic test
            utils.capture_stdio(logger)
            self.assert_(utils.sys.excepthook is not None)
            self.assertEquals(utils.os.closed_fds, utils.sys.stdio_fds)
            self.assert_(isinstance(utils.sys.stdout, utils.LoggerFileObject))
            self.assert_(isinstance(utils.sys.stderr, utils.LoggerFileObject))
            # reset; test same args, but exc when trying to close stdio
            utils.os = MockOs(raise_funcs=('dup2',))
            utils.sys = MockSys()
            # test unable to close stdio: no fds closed, capture still done
            utils.capture_stdio(logger)
            self.assert_(utils.sys.excepthook is not None)
            self.assertEquals(utils.os.closed_fds, [])
            self.assert_(isinstance(utils.sys.stdout, utils.LoggerFileObject))
            self.assert_(isinstance(utils.sys.stderr, utils.LoggerFileObject))
            # reset; test some other args
            utils.os = MockOs()
            utils.sys = MockSys()
            logger = utils.get_logger(None, log_to_console=True)
            # test console log
            utils.capture_stdio(logger, capture_stdout=False,
                                capture_stderr=False)
            self.assert_(utils.sys.excepthook is not None)
            # when logging to console, stderr remains open
            self.assertEquals(utils.os.closed_fds, utils.sys.stdio_fds[:2])
            reset_loggers()
            # stdio not captured
            self.assertFalse(isinstance(utils.sys.stdout,
                                        utils.LoggerFileObject))
            self.assertFalse(isinstance(utils.sys.stderr,
                                        utils.LoggerFileObject))
            reset_loggers()
        finally:
            # restore the real sys/os modules on utils
            utils.sys = _orig_sys
            utils.os = _orig_os
    def test_get_logger_console(self):
        """log_to_console attaches exactly one StreamHandler; a second
        call replaces the first handler rather than adding another."""
        reset_loggers()
        logger = utils.get_logger(None)
        console_handlers = [h for h in logger.logger.handlers if
                            isinstance(h, logging.StreamHandler)]
        # no console handler unless explicitly requested
        self.assertFalse(console_handlers)
        logger = utils.get_logger(None, log_to_console=True)
        console_handlers = [h for h in logger.logger.handlers if
                            isinstance(h, logging.StreamHandler)]
        self.assert_(console_handlers)
        # make sure you can't have two console handlers
        self.assertEquals(len(console_handlers), 1)
        old_handler = console_handlers[0]
        logger = utils.get_logger(None, log_to_console=True)
        console_handlers = [h for h in logger.logger.handlers if
                            isinstance(h, logging.StreamHandler)]
        self.assertEquals(len(console_handlers), 1)
        new_handler = console_handlers[0]
        # the handler object was replaced, not reused
        self.assertNotEquals(new_handler, old_handler)
        reset_loggers()
    def test_ratelimit_sleep(self):
        """ratelimit_sleep: rate 0 means no delay at all; 50 calls at
        200/s should take about 0.25s (wall-clock-dependent check)."""
        running_time = 0
        start = time.time()
        for i in range(100):
            running_time = utils.ratelimit_sleep(running_time, 0)
        self.assertTrue(abs((time.time() - start) * 100) < 1)
        running_time = 0
        start = time.time()
        for i in range(50):
            running_time = utils.ratelimit_sleep(running_time, 200)
        # make sure it's accurate to 10th of a second
        self.assertTrue(abs(25 - (time.time() - start) * 100) < 10)
    def test_ratelimit_sleep_with_incr(self):
        """incr_by weights each call: ~250 units at 500/s is about 0.5s
        of total sleeping (wall-clock-dependent check)."""
        running_time = 0
        start = time.time()
        vals = [5, 17, 0, 3, 11, 30,
                40, 4, 13, 2, -1] * 2  # adds up to 250 (with no -1)
        total = 0
        for i in vals:
            running_time = utils.ratelimit_sleep(running_time,
                                                 500, incr_by=i)
            total += i
        self.assertTrue(abs(50 - (time.time() - start) * 100) < 10)
def test_urlparse(self):
parsed = utils.urlparse('http://127.0.0.1/')
self.assertEquals(parsed.scheme, 'http')
self.assertEquals(parsed.hostname, '127.0.0.1')
self.assertEquals(parsed.path, '/')
parsed = utils.urlparse('http://127.0.0.1:8080/')
self.assertEquals(parsed.port, 8080)
parsed = utils.urlparse('https://127.0.0.1/')
self.assertEquals(parsed.scheme, 'https')
parsed = utils.urlparse('http://[::1]/')
self.assertEquals(parsed.hostname, '::1')
parsed = utils.urlparse('http://[::1]:8080/')
self.assertEquals(parsed.hostname, '::1')
self.assertEquals(parsed.port, 8080)
parsed = utils.urlparse('www.example.com')
self.assertEquals(parsed.hostname, '')
    def test_ratelimit_sleep_with_sleep(self):
        """With rate_buffer, time spent sleeping outside ratelimit_sleep
        counts against the budget, so 40 ops at 40/s still take ~1s even
        with 0.6s of external sleeps mixed in."""
        running_time = 0
        start = time.time()
        sleeps = [0] * 7 + [.2] * 3 + [0] * 30
        for i in sleeps:
            running_time = utils.ratelimit_sleep(running_time, 40,
                                                 rate_buffer=1)
            time.sleep(i)
        # make sure it's accurate to 10th of a second
        self.assertTrue(abs(100 - (time.time() - start) * 100) < 10)
def test_search_tree(self):
# file match & ext miss
with temptree(['asdf.conf', 'blarg.conf', 'asdf.cfg']) as t:
asdf = utils.search_tree(t, 'a*', '.conf')
self.assertEquals(len(asdf), 1)
self.assertEquals(asdf[0],
os.path.join(t, 'asdf.conf'))
# multi-file match & glob miss & sort
with temptree(['application.bin', 'apple.bin', 'apropos.bin']) as t:
app_bins = utils.search_tree(t, 'app*', 'bin')
self.assertEquals(len(app_bins), 2)
self.assertEquals(app_bins[0],
os.path.join(t, 'apple.bin'))
self.assertEquals(app_bins[1],
os.path.join(t, 'application.bin'))
# test file in folder & ext miss & glob miss
files = (
'sub/file1.ini',
'sub/file2.conf',
'sub.bin',
'bus.ini',
'bus/file3.ini',
)
with temptree(files) as t:
sub_ini = utils.search_tree(t, 'sub*', '.ini')
self.assertEquals(len(sub_ini), 1)
self.assertEquals(sub_ini[0],
os.path.join(t, 'sub/file1.ini'))
# test multi-file in folder & sub-folder & ext miss & glob miss
files = (
'folder_file.txt',
'folder/1.txt',
'folder/sub/2.txt',
'folder2/3.txt',
'Folder3/4.txt'
'folder.rc',
)
with temptree(files) as t:
folder_texts = utils.search_tree(t, 'folder*', '.txt')
self.assertEquals(len(folder_texts), 4)
f1 = os.path.join(t, 'folder_file.txt')
f2 = os.path.join(t, 'folder/1.txt')
f3 = os.path.join(t, 'folder/sub/2.txt')
f4 = os.path.join(t, 'folder2/3.txt')
for f in [f1, f2, f3, f4]:
self.assert_(f in folder_texts)
def test_write_file(self):
with temptree([]) as t:
file_name = os.path.join(t, 'test')
utils.write_file(file_name, 'test')
with open(file_name, 'r') as f:
contents = f.read()
self.assertEquals(contents, 'test')
# and also subdirs
file_name = os.path.join(t, 'subdir/test2')
utils.write_file(file_name, 'test2')
with open(file_name, 'r') as f:
contents = f.read()
self.assertEquals(contents, 'test2')
# but can't over-write files
file_name = os.path.join(t, 'subdir/test2/test3')
self.assertRaises(IOError, utils.write_file, file_name,
'test3')
def test_remove_file(self):
with temptree([]) as t:
file_name = os.path.join(t, 'blah.pid')
# assert no raise
self.assertEquals(os.path.exists(file_name), False)
self.assertEquals(utils.remove_file(file_name), None)
with open(file_name, 'w') as f:
f.write('1')
self.assert_(os.path.exists(file_name))
self.assertEquals(utils.remove_file(file_name), None)
self.assertFalse(os.path.exists(file_name))
def test_human_readable(self):
self.assertEquals(utils.human_readable(0), '0')
self.assertEquals(utils.human_readable(1), '1')
self.assertEquals(utils.human_readable(10), '10')
self.assertEquals(utils.human_readable(100), '100')
self.assertEquals(utils.human_readable(999), '999')
self.assertEquals(utils.human_readable(1024), '1Ki')
self.assertEquals(utils.human_readable(1535), '1Ki')
self.assertEquals(utils.human_readable(1536), '2Ki')
self.assertEquals(utils.human_readable(1047552), '1023Ki')
self.assertEquals(utils.human_readable(1048063), '1023Ki')
self.assertEquals(utils.human_readable(1048064), '1Mi')
self.assertEquals(utils.human_readable(1048576), '1Mi')
self.assertEquals(utils.human_readable(1073741824), '1Gi')
self.assertEquals(utils.human_readable(1099511627776), '1Ti')
self.assertEquals(utils.human_readable(1125899906842624), '1Pi')
self.assertEquals(utils.human_readable(1152921504606846976), '1Ei')
self.assertEquals(utils.human_readable(1180591620717411303424), '1Zi')
self.assertEquals(utils.human_readable(1208925819614629174706176),
'1Yi')
self.assertEquals(utils.human_readable(1237940039285380274899124224),
'1024Yi')
def test_validate_sync_to(self):
for goodurl in ('http://1.1.1.1/v1/a/c/o',
'http://1.1.1.1:8080/a/c/o',
'http://2.2.2.2/a/c/o',
'https://1.1.1.1/v1/a/c/o',
''):
self.assertEquals(utils.validate_sync_to(goodurl,
['1.1.1.1', '2.2.2.2']),
None)
for badurl in ('http://1.1.1.1',
'httpq://1.1.1.1/v1/a/c/o',
'http://1.1.1.1/v1/a/c/o?query',
'http://1.1.1.1/v1/a/c/o#frag',
'http://1.1.1.1/v1/a/c/o?query#frag',
'http://1.1.1.1/v1/a/c/o?query=param',
'http://1.1.1.1/v1/a/c/o?query=param#frag',
'http://1.1.1.2/v1/a/c/o'):
self.assertNotEquals(
utils.validate_sync_to(badurl, ['1.1.1.1', '2.2.2.2']),
None)
def test_TRUE_VALUES(self):
for v in utils.TRUE_VALUES:
self.assertEquals(v, v.lower())
def test_config_true_value(self):
orig_trues = utils.TRUE_VALUES
try:
utils.TRUE_VALUES = 'hello world'.split()
for val in 'hello world HELLO WORLD'.split():
self.assertTrue(utils.config_true_value(val) is True)
self.assertTrue(utils.config_true_value(True) is True)
self.assertTrue(utils.config_true_value('foo') is False)
self.assertTrue(utils.config_true_value(False) is False)
finally:
utils.TRUE_VALUES = orig_trues
def test_streq_const_time(self):
self.assertTrue(utils.streq_const_time('abc123', 'abc123'))
self.assertFalse(utils.streq_const_time('a', 'aaaaa'))
self.assertFalse(utils.streq_const_time('ABC123', 'abc123'))
def test_rsync_ip_ipv4_localhost(self):
self.assertEqual(utils.rsync_ip('127.0.0.1'), '127.0.0.1')
def test_rsync_ip_ipv6_random_ip(self):
self.assertEqual(
utils.rsync_ip('fe80:0000:0000:0000:0202:b3ff:fe1e:8329'),
'[fe80:0000:0000:0000:0202:b3ff:fe1e:8329]')
def test_rsync_ip_ipv6_ipv4_compatible(self):
self.assertEqual(
utils.rsync_ip('::ffff:192.0.2.128'), '[::ffff:192.0.2.128]')
    def test_fallocate_reserve(self):
        """FallocateWrapper raises OSError whenever free space minus the
        requested length would not stay above FALLOCATE_RESERVE; the
        filesystem is faked via a stubbed os.fstatvfs."""
        class StatVFS(object):
            # minimal statvfs stand-in: free space = f_frsize * f_bavail
            f_frsize = 1024
            f_bavail = 1
        def fstatvfs(fd):
            return StatVFS()
        orig_FALLOCATE_RESERVE = utils.FALLOCATE_RESERVE
        orig_fstatvfs = utils.os.fstatvfs
        try:
            # noop=True: skip the real syscall, exercise only reserve logic
            fallocate = utils.FallocateWrapper(noop=True)
            utils.os.fstatvfs = fstatvfs
            # Want 1023 reserved, have 1024 * 1 free, so succeeds
            utils.FALLOCATE_RESERVE = 1023
            StatVFS.f_frsize = 1024
            StatVFS.f_bavail = 1
            self.assertEquals(fallocate(0, 1, 0, ctypes.c_uint64(0)), 0)
            # Want 1023 reserved, have 512 * 2 free, so succeeds
            utils.FALLOCATE_RESERVE = 1023
            StatVFS.f_frsize = 512
            StatVFS.f_bavail = 2
            self.assertEquals(fallocate(0, 1, 0, ctypes.c_uint64(0)), 0)
            # Want 1024 reserved, have 1024 * 1 free, so fails
            utils.FALLOCATE_RESERVE = 1024
            StatVFS.f_frsize = 1024
            StatVFS.f_bavail = 1
            exc = None
            try:
                fallocate(0, 1, 0, ctypes.c_uint64(0))
            except OSError, err:
                exc = err
            self.assertEquals(str(exc), 'FALLOCATE_RESERVE fail 1024 <= 1024')
            # Want 1024 reserved, have 512 * 2 free, so fails
            utils.FALLOCATE_RESERVE = 1024
            StatVFS.f_frsize = 512
            StatVFS.f_bavail = 2
            exc = None
            try:
                fallocate(0, 1, 0, ctypes.c_uint64(0))
            except OSError, err:
                exc = err
            self.assertEquals(str(exc), 'FALLOCATE_RESERVE fail 1024 <= 1024')
            # Want 2048 reserved, have 1024 * 1 free, so fails
            utils.FALLOCATE_RESERVE = 2048
            StatVFS.f_frsize = 1024
            StatVFS.f_bavail = 1
            exc = None
            try:
                fallocate(0, 1, 0, ctypes.c_uint64(0))
            except OSError, err:
                exc = err
            self.assertEquals(str(exc), 'FALLOCATE_RESERVE fail 1024 <= 2048')
            # Want 2048 reserved, have 512 * 2 free, so fails
            utils.FALLOCATE_RESERVE = 2048
            StatVFS.f_frsize = 512
            StatVFS.f_bavail = 2
            exc = None
            try:
                fallocate(0, 1, 0, ctypes.c_uint64(0))
            except OSError, err:
                exc = err
            self.assertEquals(str(exc), 'FALLOCATE_RESERVE fail 1024 <= 2048')
            # Want 1023 reserved, have 1024 * 1 free, but file size is 1, so
            # fails
            utils.FALLOCATE_RESERVE = 1023
            StatVFS.f_frsize = 1024
            StatVFS.f_bavail = 1
            exc = None
            try:
                fallocate(0, 1, 0, ctypes.c_uint64(1))
            except OSError, err:
                exc = err
            self.assertEquals(str(exc), 'FALLOCATE_RESERVE fail 1023 <= 1023')
            # Want 1022 reserved, have 1024 * 1 free, and file size is 1, so
            # succeeds
            utils.FALLOCATE_RESERVE = 1022
            StatVFS.f_frsize = 1024
            StatVFS.f_bavail = 1
            self.assertEquals(fallocate(0, 1, 0, ctypes.c_uint64(1)), 0)
            # Want 1023 reserved, have 1024 * 1 free, and file size is 0, so
            # succeeds
            utils.FALLOCATE_RESERVE = 1023
            StatVFS.f_frsize = 1024
            StatVFS.f_bavail = 1
            self.assertEquals(fallocate(0, 1, 0, ctypes.c_uint64(0)), 0)
            # Want 1024 reserved, have 1024 * 1 free, and even though
            # file size is 0, since we're under the reserve, fails
            utils.FALLOCATE_RESERVE = 1024
            StatVFS.f_frsize = 1024
            StatVFS.f_bavail = 1
            exc = None
            try:
                fallocate(0, 1, 0, ctypes.c_uint64(0))
            except OSError, err:
                exc = err
            self.assertEquals(str(exc), 'FALLOCATE_RESERVE fail 1024 <= 1024')
        finally:
            # restore the module-level reserve and the real fstatvfs
            utils.FALLOCATE_RESERVE = orig_FALLOCATE_RESERVE
            utils.os.fstatvfs = orig_fstatvfs
    def test_fallocate_func(self):
        """fallocate() always delegates to _sys_fallocate, clamping
        negative sizes to zero (asserted via the recorded call args)."""
        class FallocateWrapper(object):
            # records the last call; the trailing ctypes value is
            # unwrapped so plain-int comparison works
            def __init__(self):
                self.last_call = None
            def __call__(self, *args):
                self.last_call = list(args)
                self.last_call[-1] = self.last_call[-1].value
                return 0
        orig__sys_fallocate = utils._sys_fallocate
        try:
            utils._sys_fallocate = FallocateWrapper()
            # Ensure fallocate calls _sys_fallocate even with 0 bytes
            utils._sys_fallocate.last_call = None
            utils.fallocate(1234, 0)
            self.assertEquals(utils._sys_fallocate.last_call,
                              [1234, 1, 0, 0])
            # Ensure fallocate calls _sys_fallocate even with negative bytes
            utils._sys_fallocate.last_call = None
            utils.fallocate(1234, -5678)
            self.assertEquals(utils._sys_fallocate.last_call,
                              [1234, 1, 0, 0])
            # Ensure fallocate calls _sys_fallocate properly with positive
            # bytes
            utils._sys_fallocate.last_call = None
            utils.fallocate(1234, 1)
            self.assertEquals(utils._sys_fallocate.last_call,
                              [1234, 1, 0, 1])
            utils._sys_fallocate.last_call = None
            utils.fallocate(1234, 10 * 1024 * 1024 * 1024)
            self.assertEquals(utils._sys_fallocate.last_call,
                              [1234, 1, 0, 10 * 1024 * 1024 * 1024])
        finally:
            # restore the real low-level fallocate implementation
            utils._sys_fallocate = orig__sys_fallocate
class TestStatsdLogging(unittest.TestCase):
    """White-box tests for the StatsdClient that get_logger wires up."""
    def test_get_logger_statsd_client_not_specified(self):
        """No log_statsd_host in the conf means no statsd client at all."""
        logger = utils.get_logger({}, 'some-name', log_route='some-route')
        # white-box construction validation
        self.assertEqual(None, logger.logger.statsd_client)
    def test_get_logger_statsd_client_defaults(self):
        """log_statsd_host alone yields a client with default port 8125,
        name-based prefix, and sample rate 1."""
        logger = utils.get_logger({'log_statsd_host': 'some.host.com'},
                                  'some-name', log_route='some-route')
        # white-box construction validation
        self.assert_(isinstance(logger.logger.statsd_client,
                                utils.StatsdClient))
        self.assertEqual(logger.logger.statsd_client._host, 'some.host.com')
        self.assertEqual(logger.logger.statsd_client._port, 8125)
        self.assertEqual(logger.logger.statsd_client._prefix, 'some-name.')
        self.assertEqual(logger.logger.statsd_client._default_sample_rate, 1)
        # set_statsd_prefix replaces the prefix (trailing dot added)
        logger.set_statsd_prefix('some-name.more-specific')
        self.assertEqual(logger.logger.statsd_client._prefix,
                         'some-name.more-specific.')
        logger.set_statsd_prefix('')
        self.assertEqual(logger.logger.statsd_client._prefix, '')
    def test_get_logger_statsd_client_non_defaults(self):
        """All the log_statsd_* conf options are honored, and the metric
        prefix is prepended to every statsd prefix value."""
        logger = utils.get_logger({
            'log_statsd_host': 'another.host.com',
            'log_statsd_port': '9876',
            'log_statsd_default_sample_rate': '0.75',
            'log_statsd_sample_rate_factor': '0.81',
            'log_statsd_metric_prefix': 'tomato.sauce',
        }, 'some-name', log_route='some-route')
        self.assertEqual(logger.logger.statsd_client._prefix,
                         'tomato.sauce.some-name.')
        logger.set_statsd_prefix('some-name.more-specific')
        self.assertEqual(logger.logger.statsd_client._prefix,
                         'tomato.sauce.some-name.more-specific.')
        logger.set_statsd_prefix('')
        self.assertEqual(logger.logger.statsd_client._prefix, 'tomato.sauce.')
        self.assertEqual(logger.logger.statsd_client._host, 'another.host.com')
        self.assertEqual(logger.logger.statsd_client._port, 9876)
        self.assertEqual(logger.logger.statsd_client._default_sample_rate,
                         0.75)
        self.assertEqual(logger.logger.statsd_client._sample_rate_factor,
                         0.81)
    def test_sample_rates(self):
        """A packet is sent only when random() falls below sample_rate,
        and the rate is appended to the payload as |@rate."""
        logger = utils.get_logger({'log_statsd_host': 'some.host.com'})
        mock_socket = MockUdpSocket()
        # encapsulation? what's that?
        statsd_client = logger.logger.statsd_client
        self.assertTrue(statsd_client.random is random.random)
        statsd_client._open_socket = lambda *_: mock_socket
        # just above the sample rate: dropped
        statsd_client.random = lambda: 0.50001
        logger.increment('tribbles', sample_rate=0.5)
        self.assertEqual(len(mock_socket.sent), 0)
        # just below the sample rate: sent
        statsd_client.random = lambda: 0.49999
        logger.increment('tribbles', sample_rate=0.5)
        self.assertEqual(len(mock_socket.sent), 1)
        payload = mock_socket.sent[0][0]
        self.assertTrue(payload.endswith("|@0.5"))
    def test_sample_rates_with_sample_rate_factor(self):
        """The effective rate is sample_rate * sample_rate_factor, for
        both the configured default and a per-call sample_rate."""
        logger = utils.get_logger({
            'log_statsd_host': 'some.host.com',
            'log_statsd_default_sample_rate': '0.82',
            'log_statsd_sample_rate_factor': '0.91',
        })
        effective_sample_rate = 0.82 * 0.91
        mock_socket = MockUdpSocket()
        # encapsulation? what's that?
        statsd_client = logger.logger.statsd_client
        self.assertTrue(statsd_client.random is random.random)
        statsd_client._open_socket = lambda *_: mock_socket
        statsd_client.random = lambda: effective_sample_rate + 0.001
        logger.increment('tribbles')
        self.assertEqual(len(mock_socket.sent), 0)
        statsd_client.random = lambda: effective_sample_rate - 0.001
        logger.increment('tribbles')
        self.assertEqual(len(mock_socket.sent), 1)
        payload = mock_socket.sent[0][0]
        self.assertTrue(payload.endswith("|@%s" % effective_sample_rate),
                        payload)
        # a per-call sample_rate is scaled by the factor too
        effective_sample_rate = 0.587 * 0.91
        statsd_client.random = lambda: effective_sample_rate - 0.001
        logger.increment('tribbles', sample_rate=0.587)
        self.assertEqual(len(mock_socket.sent), 2)
        payload = mock_socket.sent[1][0]
        self.assertTrue(payload.endswith("|@%s" % effective_sample_rate),
                        payload)
    def test_timing_stats(self):
        """The timing_stats decorator reports '<func>.timing' for 200 and
        404 responses and '<func>.errors.timing' for 401 (the statuses
        exercised here), always with a positive elapsed time."""
        class MockController(object):
            def __init__(self, status):
                self.status = status
                self.logger = self
                self.args = ()
                self.called = 'UNKNOWN'
            def timing_since(self, *args):
                self.called = 'timing'
                self.args = args
        @utils.timing_stats()
        def METHOD(controller):
            return Response(status=controller.status)
        mock_controller = MockController(200)
        METHOD(mock_controller)
        self.assertEquals(mock_controller.called, 'timing')
        self.assertEquals(len(mock_controller.args), 2)
        self.assertEquals(mock_controller.args[0], 'METHOD.timing')
        self.assert_(mock_controller.args[1] > 0)
        mock_controller = MockController(404)
        METHOD(mock_controller)
        self.assertEquals(len(mock_controller.args), 2)
        self.assertEquals(mock_controller.called, 'timing')
        self.assertEquals(mock_controller.args[0], 'METHOD.timing')
        self.assert_(mock_controller.args[1] > 0)
        mock_controller = MockController(401)
        METHOD(mock_controller)
        self.assertEquals(len(mock_controller.args), 2)
        self.assertEquals(mock_controller.called, 'timing')
        self.assertEquals(mock_controller.args[0], 'METHOD.errors.timing')
        self.assert_(mock_controller.args[1] > 0)
class TestStatsdLoggingDelegation(unittest.TestCase):
    def setUp(self):
        """Bind a UDP socket on an ephemeral port and start a daemon
        thread that feeds received statsd payloads into self.queue."""
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.bind(('localhost', 0))
        # remember the kernel-assigned port for the loggers under test
        self.port = self.sock.getsockname()[1]
        self.queue = Queue()
        self.reader_thread = Thread(target=self.statsd_reader)
        self.reader_thread.setDaemon(1)
        self.reader_thread.start()
    def tearDown(self):
        # The "no-op when disabled" test doesn't set up a real logger, so
        # create one here so we can tell the reader thread to stop.
        if not getattr(self, 'logger', None):
            self.logger = utils.get_logger({
                'log_statsd_host': 'localhost',
                'log_statsd_port': str(self.port),
            }, 'some-name')
        # the 'STOP' sentinel makes statsd_reader exit its loop
        self.logger.increment('STOP')
        self.reader_thread.join(timeout=4)
        self.sock.close()
        del self.logger
    def statsd_reader(self):
        """Reader-thread body: pump received UDP payloads into the queue
        until a payload containing 'STOP' arrives."""
        while True:
            try:
                payload = self.sock.recv(4096)
                if payload and 'STOP' in payload:
                    return 42
                self.queue.put(payload)
            except Exception, e:
                # report and bail; a dead reader would otherwise hang gets
                sys.stderr.write('statsd_reader thread: %r' % (e,))
                break
    def _send_and_get(self, sender_fn, *args, **kwargs):
        """
        Because the client library may not actually send a packet with
        sample_rate < 1, we keep trying until we get one through.
        """
        got = None
        while not got:
            # re-invoke the sender until the reader thread hands us a
            # payload (each queue.get waits at most half a second)
            sender_fn(*args, **kwargs)
            try:
                got = self.queue.get(timeout=0.5)
            except Empty:
                pass
        return got
def assertStat(self, expected, sender_fn, *args, **kwargs):
got = self._send_and_get(sender_fn, *args, **kwargs)
return self.assertEqual(expected, got)
def assertStatMatches(self, expected_regexp, sender_fn, *args, **kwargs):
got = self._send_and_get(sender_fn, *args, **kwargs)
return self.assert_(re.search(expected_regexp, got),
[got, expected_regexp])
    def test_methods_are_no_ops_when_not_enabled(self):
        """Without log_statsd_host, every statsd delegate method returns
        None and no UDP packets are generated."""
        logger = utils.get_logger({
            # No "log_statsd_host" means "disabled"
            'log_statsd_port': str(self.port),
        }, 'some-name')
        # Delegate methods are no-ops
        self.assertEqual(None, logger.update_stats('foo', 88))
        self.assertEqual(None, logger.update_stats('foo', 88, 0.57))
        self.assertEqual(None, logger.update_stats('foo', 88,
                                                   sample_rate=0.61))
        self.assertEqual(None, logger.increment('foo'))
        self.assertEqual(None, logger.increment('foo', 0.57))
        self.assertEqual(None, logger.increment('foo', sample_rate=0.61))
        self.assertEqual(None, logger.decrement('foo'))
        self.assertEqual(None, logger.decrement('foo', 0.57))
        self.assertEqual(None, logger.decrement('foo', sample_rate=0.61))
        self.assertEqual(None, logger.timing('foo', 88.048))
        self.assertEqual(None, logger.timing('foo', 88.57, 0.34))
        self.assertEqual(None, logger.timing('foo', 88.998, sample_rate=0.82))
        self.assertEqual(None, logger.timing_since('foo', 8938))
        self.assertEqual(None, logger.timing_since('foo', 8948, 0.57))
        self.assertEqual(None, logger.timing_since('foo', 849398,
                                                   sample_rate=0.61))
        # Now, the queue should be empty (no UDP packets sent)
        self.assertRaises(Empty, self.queue.get_nowait)
def test_delegate_methods_with_no_default_sample_rate(self):
self.logger = utils.get_logger({
'log_statsd_host': 'localhost',
'log_statsd_port': str(self.port),
}, 'some-name')
self.assertStat('some-name.some.counter:1|c', self.logger.increment,
'some.counter')
self.assertStat('some-name.some.counter:-1|c', self.logger.decrement,
'some.counter')
self.assertStat('some-name.some.operation:4900.0|ms',
self.logger.timing, 'some.operation', 4.9 * 1000)
self.assertStatMatches('some-name\.another\.operation:\d+\.\d+\|ms',
self.logger.timing_since, 'another.operation',
time.time())
self.assertStat('some-name.another.counter:42|c',
self.logger.update_stats, 'another.counter', 42)
# Each call can override the sample_rate (also, bonus prefix test)
self.logger.set_statsd_prefix('pfx')
self.assertStat('pfx.some.counter:1|c|@0.972', self.logger.increment,
'some.counter', sample_rate=0.972)
self.assertStat('pfx.some.counter:-1|c|@0.972', self.logger.decrement,
'some.counter', sample_rate=0.972)
self.assertStat('pfx.some.operation:4900.0|ms|@0.972',
self.logger.timing, 'some.operation', 4.9 * 1000,
sample_rate=0.972)
self.assertStatMatches('pfx\.another\.op:\d+\.\d+\|ms|@0.972',
self.logger.timing_since, 'another.op',
time.time(), sample_rate=0.972)
self.assertStat('pfx.another.counter:3|c|@0.972',
self.logger.update_stats, 'another.counter', 3,
sample_rate=0.972)
# Can override sample_rate with non-keyword arg
self.logger.set_statsd_prefix('')
self.assertStat('some.counter:1|c|@0.939', self.logger.increment,
'some.counter', 0.939)
self.assertStat('some.counter:-1|c|@0.939', self.logger.decrement,
'some.counter', 0.939)
self.assertStat('some.operation:4900.0|ms|@0.939',
self.logger.timing, 'some.operation',
4.9 * 1000, 0.939)
self.assertStatMatches('another\.op:\d+\.\d+\|ms|@0.939',
self.logger.timing_since, 'another.op',
time.time(), 0.939)
self.assertStat('another.counter:3|c|@0.939',
self.logger.update_stats, 'another.counter', 3, 0.939)
def test_delegate_methods_with_default_sample_rate(self):
    """With log_statsd_default_sample_rate set, every stat carries an
    @rate suffix; per-call sample_rate (keyword or positional) still
    overrides the default."""
    self.logger = utils.get_logger({
        'log_statsd_host': 'localhost',
        'log_statsd_port': str(self.port),
        'log_statsd_default_sample_rate': '0.93',
    }, 'pfx')
    self.assertStat('pfx.some.counter:1|c|@0.93', self.logger.increment,
                    'some.counter')
    self.assertStat('pfx.some.counter:-1|c|@0.93', self.logger.decrement,
                    'some.counter')
    self.assertStat('pfx.some.operation:4760.0|ms|@0.93',
                    self.logger.timing, 'some.operation', 4.76 * 1000)
    # Raw strings, and the '|' before the sample rate escaped: the old
    # pattern 'ms|@0.93' matched anything containing '@0.93' via regex
    # alternation, so the timing part was effectively unchecked.
    self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms\|@0\.93',
                           self.logger.timing_since, 'another.op',
                           time.time())
    self.assertStat('pfx.another.counter:3|c|@0.93',
                    self.logger.update_stats, 'another.counter', 3)
    # Each call can override the sample_rate
    self.assertStat('pfx.some.counter:1|c|@0.9912', self.logger.increment,
                    'some.counter', sample_rate=0.9912)
    self.assertStat('pfx.some.counter:-1|c|@0.9912', self.logger.decrement,
                    'some.counter', sample_rate=0.9912)
    self.assertStat('pfx.some.operation:4900.0|ms|@0.9912',
                    self.logger.timing, 'some.operation', 4.9 * 1000,
                    sample_rate=0.9912)
    self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms\|@0\.9912',
                           self.logger.timing_since, 'another.op',
                           time.time(), sample_rate=0.9912)
    self.assertStat('pfx.another.counter:3|c|@0.9912',
                    self.logger.update_stats, 'another.counter', 3,
                    sample_rate=0.9912)
    # Can override sample_rate with non-keyword arg
    self.logger.set_statsd_prefix('')
    self.assertStat('some.counter:1|c|@0.987654', self.logger.increment,
                    'some.counter', 0.987654)
    self.assertStat('some.counter:-1|c|@0.987654', self.logger.decrement,
                    'some.counter', 0.987654)
    self.assertStat('some.operation:4900.0|ms|@0.987654',
                    self.logger.timing, 'some.operation',
                    4.9 * 1000, 0.987654)
    self.assertStatMatches(r'another\.op:\d+\.\d+\|ms\|@0\.987654',
                           self.logger.timing_since, 'another.op',
                           time.time(), 0.987654)
    self.assertStat('another.counter:3|c|@0.987654',
                    self.logger.update_stats, 'another.counter',
                    3, 0.987654)
def test_delegate_methods_with_metric_prefix(self):
    """log_statsd_metric_prefix is always prepended to the metric name,
    even after set_statsd_prefix('') removes the logger-name prefix."""
    self.logger = utils.get_logger({
        'log_statsd_host': 'localhost',
        'log_statsd_port': str(self.port),
        'log_statsd_metric_prefix': 'alpha.beta',
    }, 'pfx')
    self.assertStat('alpha.beta.pfx.some.counter:1|c',
                    self.logger.increment, 'some.counter')
    self.assertStat('alpha.beta.pfx.some.counter:-1|c',
                    self.logger.decrement, 'some.counter')
    self.assertStat('alpha.beta.pfx.some.operation:4760.0|ms',
                    self.logger.timing, 'some.operation', 4.76 * 1000)
    # Raw string so the regex escapes are taken literally.
    self.assertStatMatches(
        r'alpha\.beta\.pfx\.another\.op:\d+\.\d+\|ms',
        self.logger.timing_since, 'another.op', time.time())
    self.assertStat('alpha.beta.pfx.another.counter:3|c',
                    self.logger.update_stats, 'another.counter', 3)
    self.logger.set_statsd_prefix('')
    self.assertStat('alpha.beta.some.counter:1|c|@0.9912',
                    self.logger.increment, 'some.counter',
                    sample_rate=0.9912)
    self.assertStat('alpha.beta.some.counter:-1|c|@0.9912',
                    self.logger.decrement, 'some.counter', 0.9912)
    self.assertStat('alpha.beta.some.operation:4900.0|ms|@0.9912',
                    self.logger.timing, 'some.operation', 4.9 * 1000,
                    sample_rate=0.9912)
    # '\|ms\|@0\.9912' replaces the old 'ms|@0.9912', whose unescaped
    # '|' made the sample-rate part a separate alternative and left the
    # timing portion unverified.
    self.assertStatMatches(r'alpha\.beta\.another\.op:\d+\.\d+\|ms\|@0\.9912',
                           self.logger.timing_since, 'another.op',
                           time.time(), sample_rate=0.9912)
    self.assertStat('alpha.beta.another.counter:3|c|@0.9912',
                    self.logger.update_stats, 'another.counter', 3,
                    sample_rate=0.9912)
def test_get_valid_utf8_str(self):
    """get_valid_utf8_str() passes valid UTF-8 bytes and unicode through
    unchanged, and replaces undecodable bytes with U+FFFD replacement
    characters."""
    # Two Hangul characters; reversing their UTF-8 encoding yields an
    # invalid UTF-8 byte sequence.
    unicode_sample = u'\uc77c\uc601'
    valid_utf8_str = unicode_sample.encode('utf-8')
    invalid_utf8_str = unicode_sample.encode('utf-8')[::-1]
    self.assertEquals(valid_utf8_str,
                      utils.get_valid_utf8_str(valid_utf8_str))
    self.assertEquals(valid_utf8_str,
                      utils.get_valid_utf8_str(unicode_sample))
    # NOTE(review): the expected value is a native-str literal compared
    # against bytes — this assumes Python 2 semantics; confirm before
    # running under Python 3.
    self.assertEquals('\xef\xbf\xbd\xef\xbf\xbd\xec\xbc\x9d\xef\xbf\xbd',
                      utils.get_valid_utf8_str(invalid_utf8_str))
def test_thread_locals(self):
    """The logger exposes its per-thread (txn_id, client_ip) pair via
    the thread_locals property and reflects repeated updates."""
    logger = utils.get_logger(None)
    saved_locals = logger.thread_locals
    try:
        self.assertEquals((None, None), logger.thread_locals)
        # Assign two successive pairs and check each is reflected.
        for txn, ip in (('1234', '1.2.3.4'), ('5678', '5.6.7.8')):
            logger.txn_id = txn
            logger.client_ip = ip
            self.assertEquals((txn, ip), logger.thread_locals)
    finally:
        # Restore whatever was there so other tests are unaffected.
        logger.thread_locals = saved_locals
def test_no_fdatasync(self):
    """When the os module lacks fdatasync, utils.fdatasync falls back
    to utils.fsync."""
    calls = []

    class StubOs:
        # Deliberately provides no fdatasync attribute.
        pass

    with patch('swift.common.utils.os', StubOs()):
        with patch('swift.common.utils.fsync', calls.append):
            utils.fdatasync(12345)
    self.assertEquals([12345], calls)
def test_yes_fdatasync(self):
    """When the os module provides fdatasync, utils.fdatasync delegates
    to it directly."""
    calls = []

    class StubOs:
        def fdatasync(self, fd):
            calls.append(fd)

    with patch('swift.common.utils.os', StubOs()):
        utils.fdatasync(12345)
    self.assertEquals([12345], calls)
def test_fsync_bad_fullsync(self):
    """If the F_FULLSYNC fcntl call raises, utils.fsync surfaces it as
    an OSError to the caller."""
    # (Removed an unused `called` list left over from a copied test.)
    class FCNTL:
        F_FULLSYNC = 123

        def fcntl(self, fd, op):
            raise IOError(18)

    with patch('swift.common.utils.fcntl', FCNTL()):
        self.assertRaises(OSError, lambda: utils.fsync(12345))
def test_fsync_f_fullsync(self):
    """On platforms exposing F_FULLSYNC, utils.fsync issues that fcntl
    with the file descriptor and the F_FULLSYNC opcode."""
    recorded = []

    class FCNTL:
        F_FULLSYNC = 123

        def fcntl(self, fd, op):
            recorded[:] = [fd, op]
            return 0

    with patch('swift.common.utils.fcntl', FCNTL()):
        utils.fsync(12345)
    self.assertEquals([12345, 123], recorded)
def test_fsync_no_fullsync(self):
    """Without F_FULLSYNC available, utils.fsync falls back to
    os.fsync."""
    calls = []

    class FCNTL:
        # Deliberately provides no F_FULLSYNC attribute.
        pass

    with patch('swift.common.utils.fcntl', FCNTL()):
        with patch('os.fsync', calls.append):
            utils.fsync(12345)
    self.assertEquals([12345], calls)
# Allow running this test module directly with `python <module>`.
if __name__ == '__main__':
    unittest.main()
| 42.412352 | 79 | 0.574419 |
c3359d2e3ec0f1b3afc3a7e77ff54cf0f7cfb6bd | 4,725 | py | Python | metadataproxy/settings.py | ystarikovich/metadataproxy | 258c9320c4b4455f0a8a54e22ea0c01f455a8d31 | [
"Apache-2.0"
] | null | null | null | metadataproxy/settings.py | ystarikovich/metadataproxy | 258c9320c4b4455f0a8a54e22ea0c01f455a8d31 | [
"Apache-2.0"
] | null | null | null | metadataproxy/settings.py | ystarikovich/metadataproxy | 258c9320c4b4455f0a8a54e22ea0c01f455a8d31 | [
"Apache-2.0"
] | null | null | null | import json
from os import getenv
def bool_env(var_name, default=False):
    """
    Get an environment variable coerced to a boolean value.

    Example:
        Bash:
            $ export SOME_VAL=True
        settings.py:
            SOME_VAL = bool_env('SOME_VAL', False)

    Arguments:
        var_name: The name of the environment variable.
        default: The default to use if `var_name` is not specified in the
                 environment.

    Returns: `var_name` or `default` coerced to a boolean using the following
    rules:
        "False", "false", "0" or "" => False
        Any other non-empty string => True
    """
    test_val = getenv(var_name, default)
    # Explicitly check for 'False', 'false', and '0' since all non-empty
    # strings are normally coerced to True; '' becomes False via bool().
    if test_val in ('False', 'false', '0'):
        return False
    return bool(test_val)
def float_env(var_name, default=0.0):
    """
    Get an environment variable coerced to a float value.

    Takes the same arguments as bool_env. A ValueError is raised when the
    value cannot be coerced to a float.
    """
    raw_value = getenv(var_name, default)
    return float(raw_value)
def int_env(var_name, default=0):
    """
    Get an environment variable coerced to an integer value.

    Takes the same arguments as bool_env. A ValueError is raised when the
    value cannot be coerced to an integer.
    """
    raw_value = getenv(var_name, default)
    return int(raw_value)
def str_env(var_name, default=''):
    """
    Get an environment variable as a string.

    Takes the same arguments as bool_env; returns the raw string value or
    the given default when the variable is unset.
    """
    value = getenv(var_name, default)
    return value
# --- Service configuration, all overridable via environment variables ---
PORT = int_env('PORT', 45001)
HOST = str_env('HOST', '0.0.0.0')
DEBUG = bool_env('DEBUG', False)
# Url of the docker daemon. The default is to access docker via its socket.
DOCKER_URL = str_env('DOCKER_URL', 'unix://var/run/docker.sock')
# URL of the metadata service. Default is the normal location of the
# metadata service in AWS.
METADATA_URL = str_env('METADATA_URL', 'http://169.254.169.254')
# Whether or not to mock all metadata endpoints. If True, mocked data will be
# returned to callers. If False, all endpoints except for IAM endpoints will be
# proxied through to the real metadata service.
MOCK_API = bool_env('MOCK_API', False)
# When mocking the API, use the following instance id in returned data.
MOCKED_INSTANCE_ID = str_env('MOCKED_INSTANCE_ID', 'mockedid')
# Role to use if IAM_ROLE is not set in a container's environment. If unset
# the container will get no IAM credentials.
DEFAULT_ROLE = str_env('DEFAULT_ROLE')
# The default account ID to assume roles in, if IAM_ROLE does not contain
# account information. If unset, metadataproxy will attempt to lookup role
# ARNs using IAM:GET_ROLE, if the IAM_ROLE name is not an ARN.
DEFAULT_ACCOUNT_ID = str_env('DEFAULT_ACCOUNT_ID')
# A mapping of account names to account IDs. This allows you to use
# user-friendly names in the IAM_ROLE environment variable; for instance:
#
#   AWS_ACCOUNT_MAP={'my-account-name':'12345'}
#
# A lookup of myrole@my-account-name would map to
#
#   role_name: myrole
#   account_id: 12345
AWS_ACCOUNT_MAP = json.loads(str_env('AWS_ACCOUNT_MAP', '{}'))
# The threshold before credentials expire in minutes at which metadataproxy will attempt
# to load new credentials. The default in previous versions of metadataproxy was 5, but
# we choose to make the new default 15 for better compatibility with aws-sdk-java.
ROLE_EXPIRATION_THRESHOLD = int_env('ROLE_EXPIRATION_THRESHOLD', 15)
# A json file that has a dict mapping of IP addresses to role names. Can be
# used if docker networking has been disabled and you are managing IP
# addressing for containers through another process.
ROLE_MAPPING_FILE = str_env('ROLE_MAPPING_FILE')
# Do a reverse lookup of incoming IP addresses to match containers by hostname.
# Useful if you've disabled networking in docker, but set hostnames for
# containers in /etc/hosts or DNS.
ROLE_REVERSE_LOOKUP = bool_env('ROLE_REVERSE_LOOKUP', False)
# Limit reverse lookup container matching to hostnames that match the specified
# pattern.
HOSTNAME_MATCH_REGEX = str_env('HOSTNAME_MATCH_REGEX', '^.*$')
# Optional key in container labels or environment variables to use for role session name.
# Prefix with Labels: or Env: respectively to indicate where key should be found.
ROLE_SESSION_KEY = str_env('ROLE_SESSION_KEY')
# In case we also want to query the mesos state api
MESOS_STATE_LOOKUP = bool_env('MESOS_STATE_LOOKUP', False)
# URL of the mesos state endpoint to query
MESOS_STATE_URL = str_env('MESOS_STATE_URL', 'http://localhost:5051/state')
# Timeout to use when calling the mesos state endpoint
MESOS_STATE_TIMEOUT = int_env('MESOS_STATE_TIMEOUT', 2)
| 41.447368 | 89 | 0.732063 |
4879085ecf897f57cf2e9abe4220090920a6c3bb | 1,027 | py | Python | lib/bx/seq/fasta_tests.py | mr-c/bx-python | 0b2b766eee008d1f7a2814be4ddd2c5dc3787537 | [
"MIT"
] | 122 | 2015-07-01T12:00:22.000Z | 2022-03-02T09:27:35.000Z | lib/bx/seq/fasta_tests.py | mr-c/bx-python | 0b2b766eee008d1f7a2814be4ddd2c5dc3787537 | [
"MIT"
] | 64 | 2015-11-06T21:03:18.000Z | 2022-03-24T00:55:27.000Z | lib/bx/seq/fasta_tests.py | mr-c/bx-python | 0b2b766eee008d1f7a2814be4ddd2c5dc3787537 | [
"MIT"
] | 60 | 2015-10-05T19:19:36.000Z | 2021-11-19T20:53:54.000Z | """
Tests for `bx.seq.fasta`.
"""
import unittest
from . import fasta
# Path of the FASTA fixture file exercised by the tests below.
test_fa = "test_data/seq_tests/test.fa"
# Same sequence data as stored in test.fa
valid_seq = "TGGAGGCATTTGTGATTCAATAGATGCAGAAAGAAACCTTCCTAGAGCTG" \
    + "GCGTTCTCTAACTAAAAGTGGAAAGTTCTGAGGAATGAGGACTGTTATAA" \
    + "ATCCCACCCCACACCGCACCTTCTCCAGGGAAGTTTCATGGCCGTGAAGA" \
    + "GGACAGAAAGTGAGAACCAAGATggaactgaataaacaagcttcacactg" \
    + "ttagtttccccatatgcttaccttcccacagatgccaaccttggaggcct" \
    + "aagaggcctagaatattatcctttgtctgatcatttctctacaaatttat" \
    + "tgttctttgttaagatgctacataagcccaaattctaaccacccctttga" \
    + "gttacccatcatcaagtttctcccatgtg"
# Length of the reference sequence; used to build slice test cases.
valid_seq_len = len(valid_seq)
class FASTATestCase(unittest.TestCase):
    """Checks FastaFile reads against the known reference sequence."""

    def test_get(self):
        """Read the whole sequence, a leading slice, and a trailing
        slice, and compare each against valid_seq."""
        fastafile = fasta.FastaFile(open(test_fa, "rb"))
        slices = ((0, valid_seq_len),
                  (0, 40),
                  (valid_seq_len - 40, 40))
        for start, length in slices:
            check_get(fastafile, start, length)
def check_get(fastafile, start, length):
    """Assert that reading `length` bases at offset `start` from
    `fastafile` matches the reference `valid_seq` slice.

    (Parameter renamed from `len`, which shadowed the builtin; all
    callers in this module pass it positionally.)
    """
    assert fastafile.get(start, length) == valid_seq[start:start + length]
| 28.527778 | 66 | 0.752678 |
e61c9a932f2c9dc7a40dc1e396fe8ef7c0d1ef78 | 3,908 | py | Python | keras-mnist-3.0.py | EitlerPereira/keras-mnist-workshop | 98803e19115e57373a749b2a89a4783544346430 | [
"Apache-2.0"
] | 18 | 2017-07-28T14:35:14.000Z | 2020-03-03T12:37:49.000Z | keras-mnist-3.0.py | EitlerPereira/keras-mnist-workshop | 98803e19115e57373a749b2a89a4783544346430 | [
"Apache-2.0"
] | null | null | null | keras-mnist-3.0.py | EitlerPereira/keras-mnist-workshop | 98803e19115e57373a749b2a89a4783544346430 | [
"Apache-2.0"
] | 11 | 2017-07-28T02:12:03.000Z | 2020-02-19T16:31:53.000Z | # encoding: UTF-8
# Copyright 2017 Udacity.com
# Authored by Daniel Rodrigues Loureiro (drlschilling@gmail.com)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Importamos o conjunto de dados MNIST
from keras.datasets import mnist
# Para visualizarmos bem a sequencia de camadas do modelo
# vamos usar o modulo do Keras chamado Sequential
# (https://keras.io/getting-started/sequential-model-guide/)
from keras.models import Sequential
# Como estamos construindo um modelo simples vamos utilizar
# camadas densas, que sao simplesmente camadas onde cada unidade
# ou neuronio estara conectado a cada neurônio na proxima camada.
from keras.layers import Dense
# Modulo do Keras responsavel por varias rotinas de pre-processamento
# (https://keras.io/utils/).
from keras.utils import np_utils
import keras.optimizers
# As in the earlier visualization example, load the dataset split into
# training and test subsets.
(X_train, y_train), (X_test, y_test) = mnist.load_data()
num_pixels = X_train.shape[1] * X_train.shape[2]
# To reduce memory usage, store pixel values with 32-bit precision
# (float32) and flatten each image into a single pixel vector.
X_train = X_train.reshape(X_train.shape[0], num_pixels).astype('float32')
X_test = X_test.reshape(X_test.shape[0], num_pixels).astype('float32')
# Normalize pixel values to the [0, 1] range by dividing by the maximum
# of 255, since grayscale pixel values lie between 0 and 255.
X_train = X_train / 255
X_test = X_test / 255
# Since this is a multi-class classification problem (several digit
# classes), represent the labels as categories using one-hot encoding,
# here provided by the to_categorical function.
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
# Number of digit classes found in MNIST.
num_classes = y_test.shape[1]
# Basic three-hidden-layer model: layer sizes shrink by powers of four
# from the input pixel count.
second_layer = int(num_pixels/4) # units in the second layer
third_layer = int(num_pixels/16) # units in the third layer
def base_model():
    """Build and compile a fully-connected MNIST classifier.

    Architecture: num_pixels -> num_pixels/4 -> num_pixels/16 ->
    num_classes, with ReLU hidden activations and a softmax output,
    compiled with categorical cross-entropy and Adam (initial learning
    rate 0.01 with decay 1e-6).

    Returns the compiled keras Sequential model.
    """
    model = Sequential()
    # Only the first layer needs input_dim; Keras infers the input shape
    # of every later layer from the previous one, so the redundant
    # input_dim arguments the original passed there were ignored.
    model.add(Dense(num_pixels, input_dim=num_pixels,
                    kernel_initializer='normal', activation='relu'))
    model.add(Dense(second_layer, kernel_initializer='normal',
                    activation='relu'))
    model.add(Dense(third_layer, kernel_initializer='normal',
                    activation='relu'))
    model.add(Dense(num_classes, kernel_initializer='normal',
                    activation='softmax', name='preds'))
    adam = keras.optimizers.Adam(lr=0.01, decay=1e-6)
    model.compile(loss='categorical_crossentropy', optimizer=adam,
                  metrics=['accuracy'])
    return model
model = base_model()
# The summary method reveals which layers make up the model, their
# shapes, and the number of parameters involved at each stage.
model.summary()
# Train the model.
model.fit(X_train, y_train, validation_data=(X_test, y_test), epochs=5, batch_size=100, verbose=2)
# Evaluate the performance of our first model (prints the error rate;
# the message itself is user-facing Portuguese and left unchanged).
scores = model.evaluate(X_test, y_test, verbose=0)
print("Erro de: %.2f%%" % (100-scores[1]*100))
| 41.574468 | 105 | 0.771238 |
4d374ded1592f9dc8a8e5d70f84058f70ab0a8f4 | 503 | py | Python | Programs/oopsBasics/course.py | LuciKritZ/python | ed5500f5aad3cb15354ca5ebf71748029fc6ae77 | [
"MIT"
] | null | null | null | Programs/oopsBasics/course.py | LuciKritZ/python | ed5500f5aad3cb15354ca5ebf71748029fc6ae77 | [
"MIT"
] | null | null | null | Programs/oopsBasics/course.py | LuciKritZ/python | ed5500f5aad3cb15354ca5ebf71748029fc6ae77 | [
"MIT"
] | null | null | null | class Course:
# Parameterized Constructor
def __init__(self, name, ratings):
    """Create a course with a display name and its list of numeric ratings."""
    self.name = name  # course title used in printed output
    self.ratings = ratings  # list of numeric ratings
def average(self):
numberOfRatings = len(self.ratings)
average = sum(self.ratings)/numberOfRatings
print("Average ratings for ", self.name, "is", average)
# Demo: build two courses and print their name, raw ratings, and average.
c1 = Course("Java", [1,2,3,4,5])
print(c1.name)
print(c1.ratings)
c1.average()
c2 = Course("Java Web Services", [5,5,5,5])
print(c2.name)
print(c2.ratings)
c2.average() | 25.15 | 63 | 0.638171 |
8a4601483ae6a02ea6df1934e7b5b759263eeec8 | 15,018 | py | Python | stellapy/data/read_netcdf.py | SStroteich/stella-1 | 104556a07b9736e7c28e6f1bf2f799384732f38b | [
"MIT"
] | 4 | 2021-12-15T08:23:45.000Z | 2022-02-18T15:14:42.000Z | stellapy/data/read_netcdf.py | SStroteich/stella-1 | 104556a07b9736e7c28e6f1bf2f799384732f38b | [
"MIT"
] | 37 | 2021-07-05T16:41:33.000Z | 2022-03-21T15:58:05.000Z | stellapy/data/read_netcdf.py | SStroteich/stella-1 | 104556a07b9736e7c28e6f1bf2f799384732f38b | [
"MIT"
] | 7 | 2021-07-05T15:35:55.000Z | 2022-03-09T09:23:42.000Z |
# TODO: To save memory, save only unique time and z axis in dictionaries and use references
import numpy as np
import h5py, copy, os
from scipy.io import netcdf as scnetcdf
#===========================================
# READ THE NETCDF FILE
#===========================================
def read_netcdf(netcdf_file, data=("dimensions", "time", "z", "phi2", "fluxes", "density")):
    ''' Read the "*out.nc" or "*out.h5" file of the stella simulation.

    Parameters
    ----------
    netcdf_file : pathlib.Path
        Path to the output file; may also point at the input file, in
        which case the sibling "*.out.h5" / "*.out.nc" file is used.
    data : sequence of str
        Which quantity groups to read ("dimensions", "time", "z",
        "phi2", "fluxes", "density"). Reading only what a plotting
        function needs saves memory and processing time.
        (Default changed from a mutable list literal to a tuple; the
        argument is only membership-tested, so behavior is unchanged.)

    Returns
    -------
    dict or None
        The requested quantities keyed by name, or None when no
        matching output file exists on disk.
    '''
    # Resolve the actual file: accept ".nc"/".h5" directly, otherwise
    # look for the reduced "*.out.h5" or full "*.out.nc" next to it.
    if os.path.isfile(netcdf_file) and netcdf_file.suffix == ".nc":
        pass
    elif os.path.isfile(netcdf_file) and netcdf_file.suffix == ".h5":
        pass
    elif os.path.isfile(netcdf_file.with_suffix('.out.h5')):
        netcdf_file = netcdf_file.with_suffix('.out.h5')
    elif os.path.isfile(netcdf_file.with_suffix('.out.nc')):
        netcdf_file = netcdf_file.with_suffix('.out.nc')
    else:
        return
    # Dispatch on the resolved file format.
    if netcdf_file.suffix == '.nc':
        return read_netcdfFromNcFile(netcdf_file, data)
    elif netcdf_file.suffix == '.h5':
        return read_netcdfFromH5File(netcdf_file, data)
    return
#-------------------------------------
def read_netcdfFromNcFile(netcdf_file, data):
    ''' Read the "*out.nc" file of the stella simulation.

    Returns a dict with the groups requested in `data` ("dimensions",
    "time", "z", "phi2", "fluxes", "density") copied out of the file.
    '''
    # Open the "out.nc" file
    netcdf_file = scnetcdf.netcdf_file(netcdf_file,'r')
    # Safe the data to a dictionary, use deep copies, otherwise the netcdf file refuses to close since mmap=True
    netcdf_data = {}
    # Read the dimensions of the axes as well as the values of the modes
    if "dimensions" in data:
        netcdf_data['dim_species'] = copy.deepcopy(np.copy(netcdf_file.dimensions['species']))
        netcdf_data['dim_kx'] = copy.deepcopy(np.copy(netcdf_file.dimensions['kx']))
        netcdf_data['dim_ky'] = copy.deepcopy(np.copy(netcdf_file.dimensions['ky']))
        netcdf_data['dim_z'] = copy.deepcopy(np.copy(netcdf_file.dimensions['zed']))
        netcdf_data['vec_kx'] = copy.deepcopy(np.copy(netcdf_file.variables['kx'][:]))
        netcdf_data['vec_ky'] = copy.deepcopy(np.copy(netcdf_file.variables['ky'][:]))
        netcdf_data['dim_time'] = copy.deepcopy(np.copy(netcdf_file.dimensions['t']))
        # The unlimited time dimension can be reported as None; fall back
        # to the length of the time variable itself.
        if netcdf_data['dim_time']==None:
            vec_time = copy.deepcopy(np.copy(netcdf_file.variables['t'][:]))
            netcdf_data['dim_time'] = len(vec_time)
            #print("WARNING: There was a bug in dim_time variable in the netcdf file.")
    # Get the time axis
    if "time" in data:
        netcdf_data['vec_time'] = copy.deepcopy(np.copy(netcdf_file.variables['t'][:]))
    # Get the z axis
    if "z" in data:
        netcdf_data['vec_z'] = copy.deepcopy(np.copy(netcdf_file.variables['zed'][:]))
    # Get the electrostatic potential averaged over z as function of (t,kx,ky)
    if "phi2" in data:
        netcdf_data['phi2_vs_kxky'] = copy.deepcopy(np.copy(netcdf_file.variables['phi2_vs_kxky'][:]))
    # Get the fluxes averaged over z as function of (ky,kx,t); older runs
    # may lack these variables, in which case NaN is stored instead.
    if "fluxes" in data:
        for key in ['qflx_kxky', 'pflx_kxky', 'vflx_kxky']:
            try: netcdf_data[key] = copy.deepcopy(np.copy(netcdf_file.variables[key][:]))
            except: netcdf_data[key] = np.NaN
    # Get the density fluctuation (dim_species,kx,ky)?
    if "density" in data:
        # NOTE(review): the 1e19 factor looks like a unit conversion —
        # confirm against the stella output conventions.
        netcdf_data['vec_density'] = copy.deepcopy(np.copy(netcdf_file.variables['dens'][:])*1e19)
    # Close the file
    netcdf_file.close()
    # Return the data read from the netcdf file
    return netcdf_data
#-------------------------------------
def read_netcdfFromH5File(netcdf_file, data):
    ''' Read the "*out.h5" file of the stella simulation.

    Mirrors read_netcdfFromNcFile() but reads the reduced HDF5 copy,
    where the same quantities are stored under identical key names.
    '''
    # Open the "out.h5" file
    h5_netcdf = h5py.File(netcdf_file, 'r')
    # Safe the data to a dictionary
    netcdf_data = {}
    # Read the dimensions of the axes as well as the values of the modes
    if "dimensions" in data:
        # Scalar dimensions live in the file attributes; mode vectors are
        # regular datasets.
        for quant in ['dim_species', 'dim_kx', 'dim_ky', 'dim_time', 'dim_z']:
            netcdf_data[quant] = h5_netcdf.attrs[quant]
        for quant in ['vec_kx', 'vec_ky']:
            netcdf_data[quant] = h5_netcdf[quant][:]
    # Get the time axis
    if "time" in data:
        netcdf_data['vec_time'] = h5_netcdf['vec_time'][:]
    # Get the z axis
    if "z" in data:
        netcdf_data['vec_z'] = h5_netcdf['vec_z'][:]
    # Get the electrostatic potential averaged over z as function of (t,kx,ky)
    if "phi2" in data:
        netcdf_data['phi2_vs_kxky'] = h5_netcdf['phi2_vs_kxky'][:]
    # Get the fluxes averaged over z as function of (ky,kx,t); missing
    # datasets are replaced by a NaN array.
    if "fluxes" in data:
        for quant in ['qflx_kxky', 'pflx_kxky', 'vflx_kxky']:
            try: netcdf_data[quant] = h5_netcdf[quant][:]
            except: netcdf_data[quant] = np.array([np.NaN])
    # Get the density fluctuation (dim_species,kx,ky)?
    if "density" in data:
        try: netcdf_data['vec_density'] = h5_netcdf['vec_density'][:]
        except: netcdf_data['vec_density'] = np.NaN
    # Close the file
    h5_netcdf.close()
    # Return the data read from the netcdf file
    return netcdf_data
if True: return
#==================================================
# ATTACH THE NETCDF DATA TO THE SIMULATION OBJECT
#==================================================
def get_netcdfDimensions(self):
    """ Add the kx, ky, time and z dimensions as attributes to the simulation object.

    Populates both the global (maximum over all input files) dimensions
    and the exact per-file dimensions needed when reading other output
    files later.
    """
    # Initiate the attributes
    self.vec_kx, self.vec_ky = [], []
    self.vec_kxPerFile, self.vec_kyPerFile = {}, {}
    self.dim_species, self.dim_time, self.dim_z, self.dim_kx, self.dim_ky = 0, 0, 0, 0, 0
    self.dim_timePerFile, self.dim_zPerFile, self.dim_kxPerFile, self.dim_kyPerFile = {}, {}, {}, {}
    # Read the data for each input file that might differ between the input files
    # Each input file usually contains different modes (kx,ky)
    # But the time and spatial resolution might also be changed giving different vec_time and vec_z
    # Therefore dim_time is the maximum dimension among the input files, to construct the arrays
    # Remove the zero from the time axis since all output files start counting from delta t
    for input_file in self.input_files:
        # Show the reading progress
        if self.Progress: i = self.input_files.index(input_file); length = len(self.input_files)
        if self.Progress: self.Progress.move(i/length*100,"Determining dimensions ("+str(i)+"/"+str(length)+")")
        # Read the netcdf data to determine the maximum dimensions of the input files
        netcdf_data = read_netcdf(input_file, ["dimensions"])
        self.dim_species = np.max([netcdf_data['dim_species'], self.dim_species])
        # dim_time-1 drops the t=0 sample (see note above).
        self.dim_time = np.max([netcdf_data['dim_time']-1, self.dim_time])
        self.dim_z = np.max([netcdf_data['dim_z'], self.dim_z])
        self.vec_kx += list(netcdf_data['vec_kx'])
        self.vec_ky += list(netcdf_data['vec_ky'])
        # For reading other output files we need the exact dimensions of each input file
        self.dim_timePerFile[input_file] = netcdf_data['dim_time']-1
        self.dim_zPerFile[input_file] = netcdf_data['dim_z']
        self.dim_kxPerFile[input_file] = len(list(netcdf_data['vec_kx']))
        self.dim_kyPerFile[input_file] = len(list(netcdf_data['vec_ky']))
        self.vec_kxPerFile[input_file] = list(netcdf_data['vec_kx'])
        self.vec_kyPerFile[input_file] = list(netcdf_data['vec_ky'])
        del netcdf_data
    # Get the total amount of modes and sort them
    self.vec_kx = list(set(self.vec_kx)); self.vec_kx.sort()
    self.vec_ky = list(set(self.vec_ky)); self.vec_ky.sort()
    # Get the dimensions of the modes
    self.dim_kx = len(self.vec_kx)
    self.dim_ky = len(self.vec_ky)
    return
#-----------------------------
def get_netcdfTimeVsKxKy(self):
    """ Add the time axis for each mode (kx,ky) of a linear simulation as an attribute of the simulation object.

    Builds self.time_kxky of shape (dim_time, dim_kx, dim_ky), padded
    with NaN where a file has fewer time samples than the maximum.
    """
    # Initiate the attribute
    self.time_kxky = np.empty((self.dim_time, self.dim_kx, self.dim_ky)); self.time_kxky[:,:,:] = np.NaN
    # Read the data for each input file
    for input_file in self.input_files:
        # Show the reading progress
        if self.Progress: i = self.input_files.index(input_file); length = len(self.input_files)
        if self.Progress: self.Progress.move(i/length*100,"Reading the time axis ("+str(i)+"/"+str(length)+")")
        # Read the netcdf data to get time, z and phi2
        netcdf_data = read_netcdf(input_file, ["time"])
        dim_time = self.dim_timePerFile[input_file]
        for kx in self.vec_kxPerFile[input_file]:
            for ky in self.vec_kyPerFile[input_file]:
                # Map the file-local mode to its global (kx,ky) indices.
                index_kx = self.vec_kx.index(kx)
                index_ky = self.vec_ky.index(ky)
                # [1:] skips the t=0 sample, matching dim_time = len-1.
                self.time_kxky[0:dim_time, index_kx, index_ky] = netcdf_data['vec_time'][1:]
        del netcdf_data
    return
#-----------------------------
def get_netcdfZVsKxKy(self):
    """ Add the z axis for each mode (kx,ky) as an attribute of the simulation object.

    Builds self.z_kxky of shape (dim_z, dim_kx, dim_ky), padded with NaN
    where a file has fewer z points than the maximum.
    """
    # Initiate the attributes: the time, z and phi2 can differ for each input file
    self.z_kxky = np.empty((self.dim_z, self.dim_kx, self.dim_ky)); self.z_kxky[:,:,:] = np.NaN
    # Read the data for each input file
    for input_file in self.input_files:
        # Show the reading progress
        if self.Progress: i = self.input_files.index(input_file); length = len(self.input_files)
        if self.Progress: self.Progress.move(i/length*100,"Reading the z axis ("+str(i)+"/"+str(length)+")")
        # Read the netcdf data to get time, z and phi2
        netcdf_data = read_netcdf(input_file, ["z"])
        dim_z = self.dim_zPerFile[input_file]
        for kx in self.vec_kxPerFile[input_file]:
            for ky in self.vec_kyPerFile[input_file]:
                # Map the file-local mode to its global (kx,ky) indices.
                index_kx = self.vec_kx.index(kx)
                index_ky = self.vec_ky.index(ky)
                self.z_kxky[0:dim_z, index_kx, index_ky] = netcdf_data['vec_z']
        del netcdf_data
    return
#-----------------------------
def get_netcdfPhi2VsKxKy(self):
    """ Add the potential squared for each mode (kx,ky) as an attribute of the simulation object.

    Builds self.phi2_kxky of shape (dim_time, dim_kx, dim_ky), padded
    with NaN for modes/times absent from a given file.
    """
    # Initiate the attributes: the time, z and phi2 can differ for each input file
    self.phi2_kxky = np.empty((self.dim_time, self.dim_kx, self.dim_ky)); self.phi2_kxky[:,:,:] = np.NaN
    # Read the data for each input file
    for input_file in self.input_files:
        # Show the reading progress
        if self.Progress: i = self.input_files.index(input_file); length = len(self.input_files)
        if self.Progress: self.Progress.move(i/length*100,"Reading the potential ("+str(i)+"/"+str(length)+")")
        # Read the netcdf data to get time, z and phi2
        netcdf_data = read_netcdf(input_file, ["phi2"])
        dim_time = self.dim_timePerFile[input_file]
        for kx in self.vec_kxPerFile[input_file]:
            for ky in self.vec_kyPerFile[input_file]:
                # i_kx/i_ky index into this file's data; index_kx/index_ky
                # index into the global, merged mode arrays.
                i_kx = self.vec_kxPerFile[input_file].index(kx)
                i_ky = self.vec_kyPerFile[input_file].index(ky)
                index_kx = self.vec_kx.index(kx)
                index_ky = self.vec_ky.index(ky)
                # [1:, ...] skips the t=0 sample, matching dim_time = len-1.
                self.phi2_kxky[0:dim_time, index_kx, index_ky] = netcdf_data['phi2_vs_kxky'][1:, i_kx, i_ky]
        del netcdf_data
    return
#----------------------------
def get_netcdfFluxesKxKy(self):
    """ Read the fluxes versus (kx,ky) from the netcdf file of the simulation.

    Builds self.pflx_kxky, self.qflx_kxky and self.vflx_kxky, each of
    shape (dim_time, dim_kx, dim_ky), padded with NaN.
    """
    # Initiate the attributes: Save the fluxes per (kx,ky)
    self.pflx_kxky = np.empty((self.dim_time, self.dim_kx, self.dim_ky)); self.pflx_kxky[:,:,:] = np.NaN
    self.qflx_kxky = np.empty((self.dim_time, self.dim_kx, self.dim_ky)); self.qflx_kxky[:,:,:] = np.NaN
    self.vflx_kxky = np.empty((self.dim_time, self.dim_kx, self.dim_ky)); self.vflx_kxky[:,:,:] = np.NaN
    # Read the data for each input file
    for input_file in self.input_files:
        # Show the reading progress
        if self.Progress: i = self.input_files.index(input_file); length = len(self.input_files)
        if self.Progress: self.Progress.move(i/length*100,"Reading the fluxes ("+str(i)+"/"+str(length)+")")
        # Read the netcdf data to get the fluxes
        netcdf_data = read_netcdf(input_file, ["fluxes"])
        dim_time = self.dim_timePerFile[input_file]
        for kx in self.vec_kxPerFile[input_file]:
            for ky in self.vec_kyPerFile[input_file]:
                # i_kx/i_ky index into this file's data; index_kx/index_ky
                # index into the global, merged mode arrays.
                i_kx = self.vec_kxPerFile[input_file].index(kx)
                i_ky = self.vec_kyPerFile[input_file].index(ky)
                index_kx = self.vec_kx.index(kx)
                index_ky = self.vec_ky.index(ky)
                self.pflx_kxky[0:dim_time, index_kx, index_ky] = netcdf_data['pflx_kxky'][:, i_kx, i_ky]
                self.qflx_kxky[0:dim_time, index_kx, index_ky] = netcdf_data['qflx_kxky'][:, i_kx, i_ky]
                self.vflx_kxky[0:dim_time, index_kx, index_ky] = netcdf_data['vflx_kxky'][:, i_kx, i_ky]
        del netcdf_data
    return
#---------------------------
def get_netcdfDensity(self):
    """ Read the density versus (species, kx,ky)? from the netcdf file of the simulation.

    Builds self.density_kxky of shape (dim_species, dim_kx, dim_ky),
    padded with NaN.
    """
    # TODO: Not sure what the dimensions are of the density variable, thus this could be wrong.
    # Initiate the attributes: Save the density per (kx,ky)
    self.density_kxky = np.empty((self.dim_species, self.dim_kx, self.dim_ky)); self.density_kxky[:,:,:] = np.NaN
    # Read the data for each input file
    for input_file in self.input_files:
        # Read the netcdf data to get density(time, kx, ky)
        netcdf_data = read_netcdf(input_file, ["density"])
        for kx in self.vec_kxPerFile[input_file]:
            for ky in self.vec_kyPerFile[input_file]:
                # i_kx/i_ky index into this file's data; index_kx/index_ky
                # index into the global, merged mode arrays.
                i_kx = self.vec_kxPerFile[input_file].index(kx)
                i_ky = self.vec_kyPerFile[input_file].index(ky)
                index_kx = self.vec_kx.index(kx)
                index_ky = self.vec_ky.index(ky)
                self.density_kxky[:, index_kx, index_ky] = netcdf_data['vec_density'][:, i_kx, i_ky]
        del netcdf_data
    return
| 46.93125 | 116 | 0.616061 |
d731a0311325b0ab264e1ef585d33f0ea4907e44 | 4,589 | py | Python | test/test_workflow.py | ewebed/biggerquery | 7892cf394444fc44a731da44ffbf0a029343c3c8 | [
"Apache-2.0"
] | null | null | null | test/test_workflow.py | ewebed/biggerquery | 7892cf394444fc44a731da44ffbf0a029343c3c8 | [
"Apache-2.0"
] | null | null | null | test/test_workflow.py | ewebed/biggerquery | 7892cf394444fc44a731da44ffbf0a029343c3c8 | [
"Apache-2.0"
] | null | null | null | from collections import OrderedDict
import mock
from unittest import TestCase
from bigflow.workflow import Workflow, Definition, InvalidJobGraph, WorkflowJob
class WorkflowTestCase(TestCase):
def test_should_run_jobs(self):
    """Running a workflow runs every job in its definition, passing the
    runtime as a keyword argument."""
    jobs = [mock.Mock() for _ in range(100)]
    workflow = Workflow(workflow_id='test_workflow', definition=jobs)

    workflow.run('2019-01-01')

    for job in jobs:
        job.assert_has_calls([mock.call.run(runtime='2019-01-01')])
def test_should_run_single_job(self):
    """run_job runs only the job matching the given id (runtime passed
    positionally); every other job stays untouched."""
    target_job = mock.Mock()
    setattr(target_job, 'id', 'first job')
    other_jobs = [mock.Mock() for _ in range(100)]
    workflow = Workflow(workflow_id='test_workflow',
                        definition=[target_job] + other_jobs)

    workflow.run_job('first job', '2020-01-01')

    for job in other_jobs:
        job.assert_not_called()
    target_job.assert_has_calls([mock.call.run('2020-01-01')])
def test_should_have_id_and_schedule_interval(self):
# given
workflow = Workflow(
workflow_id='test_workflow',
definition=[],
schedule_interval='@hourly')
# expected
self.assertEqual(workflow.schedule_interval, '@hourly')
def test_should_throw_exception_when_circular_dependency_is_found(self):
# given
original_job = mock.Mock()
job1, job2, job3, job4 = [WorkflowJob(original_job, i) for i in range(4)]
# job1 --- job2
# | |
# \ |
# \ |
# \ |
# \ |
# \ |
# \ |
# job3
job_graph = {
job1: (job2,),
job2: (job3,),
job3: (job1,)
}
# expected
with self.assertRaises(InvalidJobGraph):
Definition(job_graph)
# given
# job1 job4
# | | |
# | | |
# | | |
# job2 ------ job3
job_graph = {
job1: (job2,),
job2: (job3,),
job3: (job4,),
job4: (job3,)
}
# expected
with self.assertRaises(InvalidJobGraph):
Definition(job_graph)
def test_should_run_jobs_in_order_accordingly_to_graph_schema(self):
# given
original_job = mock.Mock()
job1, job2, job3, job4, job5, job6, job7, job8, job9 = [WorkflowJob(original_job, i) for i in range(9)]
job_graph = OrderedDict([
(job1, (job5, job6)),
(job2, (job6,)),
(job3, (job6,)),
(job4, (job7,)),
(job6, (job8,)),
(job7, (job8,)),
(job5, (job9,))
])
# job1 job2 job3 job4
# | \ | / |
# | \ | / |
# | \ | / |
# | \ | / |
# job5 job6 job7
# | \ /
# | \ /
# | \ /
# | \ /
# | \ /
# job9 job8
definition = Definition(job_graph)
workflow = Workflow(workflow_id='test_workflow', definition=definition, schedule_interval='@hourly')
# expected
self.assertEqual(list(workflow.build_sequential_order()), [job1, job5, job9, job2, job3, job6, job4, job7, job8])
# given
job_graph = OrderedDict([
(job1, (job5, job6, job7)),
(job2, (job6,)),
(job3, (job6,)),
(job4, (job7,)),
(job6, (job8,)),
(job7, (job8,)),
(job5, (job9,)),
(job6, (job9,))
])
# job1 job2 job3 job4
# | \ | / |
# | \ | / |
# | \ | / |
# | \ | / |
# job5 job6 job7
# | / \ /
# | / \ /
# | / \ /
# | / \ /
# | / \ /
# job9 job8
definition = Definition(job_graph)
workflow = Workflow(workflow_id='test_workflow', definition=definition, schedule_interval='@hourly')
# expected
self.assertEqual(workflow.build_sequential_order(), [job1, job5, job2, job3, job6, job9, job4, job7, job8])
| 29.416667 | 121 | 0.449553 |
c755d79d6284afc9a634f756312bb6cd2c26e9fe | 20,038 | py | Python | Simulation_decimal_patch.py | jecki/CoopSim | 65756b0a98d894e5a10c63431bb119a0b9c3e363 | [
"MIT"
] | 4 | 2015-09-06T18:48:36.000Z | 2021-04-13T00:38:06.000Z | Simulation_decimal_patch.py | jecki/CoopSim | 65756b0a98d894e5a10c63431bb119a0b9c3e363 | [
"MIT"
] | null | null | null | Simulation_decimal_patch.py | jecki/CoopSim | 65756b0a98d894e5a10c63431bb119a0b9c3e363 | [
"MIT"
] | null | null | null | # simulation setup and simulation objects
from __future__ import generators # retain python 2.2 compatibility
from decimal import *
import copy, re
from PyPlotter import Gfx, Colors
from PopulationDynamics import Dynamics
import PrisonersDilemma as PD
from PopulationDynamics.Compatibility import *
from Logging import H1,H2,H3, H1X,H2X,H3X, LogNotificationInterface, HTMLLog
NUM_GENERATIONS = 50 # number of generations to start with in the
# population dynamical simulation
NUM_SAMPLES = PD.NUM_SAMPLES # Samples to take of a match if randomizing
# strategies or in game noise is involved
NO_GRAPH_OPTIMIZATION = False # graph drawing will be slowed down but
# accuracy is increased to printing and saving
###############################################################################
#
# classes for the description of simulation setups
#
###############################################################################
class Mutator(object):
    """Describes the mutation of a strategy in the simulation.

    original = int: the ordinal number of the strategy that is going
                    to mutate
    mutated  = int: the ordinal number of the strategy that 'original'
                    is going to mutate into
    rate     = float [0.0 - 1.0]: mutation rate
    """
    def __init__(self, original, mutated, rate=0.01):
        self.original = original
        self.mutated = mutated
        self.rate = rate
    def __eq__(self, other):
        # Two mutators are equal iff all three defining fields match;
        # any non-Mutator object compares unequal.
        if not isinstance(other, Mutator):
            return False
        return (self.original == other.original and
                self.mutated == other.mutated and
                self.rate == other.rate)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __repr__(self):
        # added for debuggability: shows the defining fields
        return "Mutator(original=%r, mutated=%r, rate=%r)" % \
               (self.original, self.mutated, self.rate)
class SimSetup(object):
    """Contains all data defining a simulation.

    name         = string: name of the model
    strategyList = list of Strategy objects: the list of the strategies
    population   = tuple: population share for each strategy
    correlation  = float [0.0-1.0]: correlation factor
    gameNoise    = float [0.0-1.0]: in game noise
    noise        = float [0.0-1.0]: evolutionary background noise
    iterations   = int: number of iterations for one match
    samples      = int: number of sample matches to take (only useful for
                   randomizing strategies)
    payoff       = tuple of floats: payoff tuple (T, R, P, S)
    mutators     = list of Mutator objects: description of possible
                   mutation of strategies during the course of the
                   evolutionary development.
    cachedPM     = cached payoff matrix
    cachedLog    = cached tournament log object
    """
    def __eq__(self, other):
        """Structural comparison of two setups.  The 'name' attribute and
        the caches are deliberately ignored."""
        if not isinstance(other, SimSetup): return False
        # names make no difference!
        if len(self.strategyList) != len(other.strategyList):
            return False
        for i in xrange(len(self.strategyList)):
            if self.strategyList[i] != other.strategyList[i]: return False
        if self.population != other.population: return False
        if self.correlation != other.correlation: return False
        if self.gameNoise != other.gameNoise: return False
        if self.noise != other.noise: return False
        if self.iterations != other.iterations: return False
        if self.samples != other.samples: return False
        if self.payoff != other.payoff: return False
        if len(self.mutators) != len(other.mutators):
            return False
        for i in xrange(len(self.mutators)):
            if self.mutators[i] != other.mutators[i]: return False
        return True
    def __ne__(self, other):
        return not self.__eq__(other)
    def __init__(self, name, strategyList = None, population = None,
                 correlation = 0.0, gameNoise = 0.0, noise = 0.0,
                 iterations = 200, samples = NUM_SAMPLES, payoff = (5.,3.,1.,0.),
                 mutators = None, PM = None, log = None):
        # None sentinels instead of mutable default arguments: literal list
        # defaults would be shared between all SimSetup instances that rely
        # on the default and would leak mutations across setups.
        if strategyList is None: strategyList = []
        if mutators is None: mutators = []
        self.name = name
        self.strategyList = strategyList
        if population == None:
            self.population = Dynamics.UniformDistribution(len(self.strategyList))
        else: self.population = population
        self.correlation = correlation
        self.gameNoise = gameNoise
        self.noise = noise
        self.iterations = iterations
        self.samples = samples
        self.payoff = payoff
        self.mutators = mutators
        self.cachedPM = PM     # cached payoff matrix (may be None)
        self.cachedLog = log   # cached tournament log object (may be None)
        self._userDefined = True # SimApp marks its own setups as False
    def fname(self):
        """Returns the name of the setup as a proper file name."""
        return self.name.replace("*","x").replace("?","x").replace("/","x")\
               .replace("\\","x").replace(" ","_")
    def htmlRepresentation(self):
        """Returns extensive information about this setup in HTML format."""
        def rr(s):
            "replace trailing zeros with blanks"
            l = len(s); s2 = s.rstrip("0")
            return (s2 + " "*(l - len(s2)))
        html = ["<p>" + H2 + '<a name="setup"></a>Simulation setup of ' + \
                self.name + H2X + "<br />\n\n"]
        html.append("<b>Strategies:</b> ")
        snames = [str(s) for s in self.strategyList]
        html.append(", ".join(snames))
        html.append("<br /><br />\n\n<tt>")
        # Only print the individual shares if the distribution is not
        # (numerically) uniform; 'scale' is the tolerance for uniformity.
        p0 = self.population[0]; scale = 1.0/(1000*len(self.population))
        for p in self.population:
            if abs(p - p0) > scale:
                pop = [rr("%1.5f"%s) for s in self.population]
                lines = [", ".join(pop[i:i+5]) for i in xrange(0,len(pop),5)]
                html.append("<b>population shares:</b><br />\n")
                html.append("<br />\n".join(lines))
                html.append("<br /><br />\n\n")
                break
        else:
            html.append("uniform population distribution<br /><br />\n\n")
        if self.mutators != []:
            html.append("<b>mutations:</b><br />\n")
            for d in self.mutators:
                s1 = str(self.strategyList[d.original])
                s2 = str(self.strategyList[d.mutated])
                s1 += " " * max(20-len(s1), 1)
                s2 += " " * max(20-len(s2), 1)
                html.append(s1 + "=> " + s2 + " " + \
                            ("%1.5f" % d.rate).rstrip("0") + "<br />\n")
            html.append("<br />\n")
        if self.correlation != 0.0:
            html.append("correlation:"+" "*8+"%f<br />\n"%self.correlation)
        if self.gameNoise != 0.0:
            html.append("game Noise:"+" "*9+"%f<br />\n"%self.gameNoise)
        if self.noise != 0.0:
            html.append("evolutionary Noise: %f<br />\n"%self.noise)
        html.append("payoff parameters:  " + \
                    str(self.payoff) + "<br />\n")
        html.append("iterations:"+" "*9+"%i<br />\n"%self.iterations)
        # The "match samples" line is only meaningful when sampling matters,
        # i.e. with in-game noise or at least one randomizing strategy.
        # (Fixed: the reduce previously started with True, which made the
        # whole condition constant-True.)
        if self.gameNoise > 0.0 or \
           reduce(lambda a, b: a or b.randomizing, self.strategyList, False):
            html.append("match samples:"+" "*6+"%i<br />\n"%self.samples)
        html.append("</tt></p>\n")
        return "".join(html)
###############################################################################
#
# Simulation class
#
###############################################################################
# generator functions to optimize graph drawing
class xaxisIter(object):
    """-> iterate over virtual x-coordinates with one point for
    each screen pixel.

    Used to throttle graph drawing: check(x) answers whether a data point
    at virtual x-coordinate 'x' falls onto a new screen pixel and therefore
    needs to be plotted.  (Python 2 code: uses xrange and iterator.next().)
    """
    def __init__(self, graph, x1, x2):
        # 'graph' must offer _scaleX (virtual -> pixel) and _invX
        # (pixel -> virtual) coordinate transformations.
        self.graph = graph
        a = self.graph._scaleX(x1); b = self.graph._scaleX(x2)
        self.rngIter = xrange(a, b+2).__iter__()
        # virtual x-coordinate of the next pixel that still needs a point
        self.pos = self.graph._invX(self.rngIter.next())
    def check(self, x):
        # True if 'x' reaches the next pixel position (then advance),
        # otherwise fall back to the global NO_GRAPH_OPTIMIZATION switch
        # (drawing every point when optimization is turned off).
        if x >= self.pos:
            try:
                self.pos = self.graph._invX(self.rngIter.next())
            except StopIteration:
                pass # actually this should never happen, catching it anyway!
            return True
        else: return NO_GRAPH_OPTIMIZATION
def mutation(population, degenList):
    """Apply mutation to a population.

    For every Mutator in 'degenList' a fraction 'rate' of the share of the
    'original' strategy (always taken from the *unmodified* input
    population) is moved over to the 'mutated' strategy.  Returns the
    resulting shares as a new tuple; 'population' itself is left untouched.
    """
    shares = list(population)
    for mut in degenList:
        transfer = population[mut.original] * mut.rate
        shares[mut.original] = shares[mut.original] - transfer
        shares[mut.mutated] = shares[mut.mutated] + transfer
    return tuple(shares)
NORMAL_CAPTION_PEN = Gfx.BLACK_PEN
SMALL_CAPTION_PEN = Gfx.Pen(color = Gfx.BLACK, fsize = Gfx.SMALL)
class Simulation(object):
    """The simulation class is responsible for running a simulation
    and producing an output of the results in graphical form as well
    as an html log.

    Attributes:
        graph      : A Graph.Cartesian object for the graphical representation
                     of the population dynamics
        simplex    : A Simplex.Diagram object for producing a simplex
                     diagram of the population dynamics (only if exactly
                     three strategies are present in the simulation!)
        notifier   : A Logging.LogNotificationInterface for communicating
                     the progress of the simulation to the GUI
        log        : A Logging.HTMLLog for logging the simulation results
        setup      : A copy (!) of the simulation setup. (The progress of
                     the population dynamical simulation is written into
                     the 'population' field of this copy)
        payoffMatrix: The payoff matrix of the tournament part of the
                     simulation
        dynamicsFunction: The dynamics function for the population dynamical
                     development
        rangeStack : Sequence of the respective range parameters of the
                     simulation graph (x1,y1,x2,y2), one for each call
                     of the method 'continueSim'
        imgDirName : Name of the directory to write the images of the html
                     log to (only if the log is actually saved to disk!)
        simplexName: file name of the simplex graph, if the html log is saved
        firstGeneration: the first generation to start the next population
                     dynamical cycle with (when calling continueSim)
        lastGeneration : the last generation of the next cycle
    """
    def __init__(self, graph, simplex, log, notifier):
        self.graph = graph
        self.simplex = simplex
        self.notifier = notifier
        self.log = log
        self.setup = None
        self.payoffMatrix = None
        self.dynamicsFunction = None
        self.rangeStack = []
        self.imgdirName = ""
        self.simplexName = ""
        self.firstGeneration = 1
        self.lastGeneration = NUM_GENERATIONS
        # NOTE(review): this flag is never read in this class; _prepareEvLog
        # uses '_alreadyLogged' (set in newSetup) instead — looks like a
        # leftover, confirm before removing.
        self._dontLogTwiceFlag = False
    def _prepareEvLog(self):
        """Write the header of the 'Evolutionary Simulation' section into the
        html log: TOC entry, headline and - for exactly three strategies -
        the reference to the simplex image.  Runs only once per setup,
        guarded by the '_alreadyLogged' flag which newSetup() resets."""
        if not self._alreadyLogged:
            self._alreadyLogged = True
            self.imgdirName = self.setup.fname() + "_images"
            self.log.appendAt("toc",
                '<a href="#evranking">4. Evolutionary Simulation</a><br />\n')
            self.log.append(H2 + \
                '<a name="evranking"></a>Evolutionary Simulation:' + \
                H2X + "<br />\n\n")
            if len(self.setup.strategyList) == 3:
                self.simplexName = self.setup.fname() + "_simplex"
                path = self.imgdirName + "/" + self.simplexName
                self.log.append('<div align="center">' + \
                                '<a href="'+path+'.png">' + \
                                '<img src="'+path+'_web.png" alt="Image: '+\
                                self.simplexName + '.png not found!" />' + \
                                '</a></div><br /><br />\n')
            self.log.entryPoint("evranking")
    def newSetup(self, setup, progressCallback = lambda f:1):
        """Install a new simulation setup: (re-)runs the tournament to build
        the payoff matrix (or restores it from the setup's cache), converts
        the numbers to high-precision Decimals, creates the population
        dynamics function and resets graph and simplex diagram.

        setup            -- SimSetup object (a shallow copy is stored)
        progressCallback -- callable reporting tournament progress to the GUI
        """
        self._alreadyLogged = False
        self.setup = copy.copy(setup)
        for s in self.setup.strategyList: s.register(self.setup)
        self.firstGeneration = 1;
        self.lastGeneration = NUM_GENERATIONS
        if self.setup.cachedPM == None:
            # no cached tournament results yet: write the log header and
            # run the full tournament, then cache matrix and log in BOTH
            # the original setup and the working copy
            self.log.clear()
            self.log.pageTitle(self.setup.name)
            self.log.append(H1+'<a name="top"></a>CoopSim - Simulation: '+\
                            self.setup.name + H1X + "\n\n")
            self.log.append(H2+'<a name="toc"></a>Table of Contents'+ \
                            H2X + "\n\n<p>")
            self.log.append('<a href="#setup">0. Simulation Setup</a><br />\n')
            self.log.entryPoint("toc")
            self.log.append("</p><br />\n")
            self.log.append(setup.htmlRepresentation())
            self.log.append('<div align="right"><a href="#top">[top]' + \
                            '</a></div><br />\n')
            p = self.setup.payoff
            a = array([[[p[2],p[2]],[p[0],p[3]]],[[p[3],p[0]],[p[1],p[1]]]])
            self.payoffMatrix = PD.GenPayoffMatrix(self.setup.strategyList,
                a, self.setup.iterations, self.setup.samples,
                self.setup.gameNoise,self.log,progressCallback)
            setup.cachedPM = self.payoffMatrix
            self.setup.cachedPM = setup.cachedPM
            setup.cachedLog = self.log.backup()
            self.setup.cachedLog = setup.cachedLog
        else:
            self.payoffMatrix = self.setup.cachedPM
            self.log.replay(self.setup.cachedLog)
        self.notifier.updateLog(self.log.getHTMLPage())
        ## want decimals ?
        # Switch the (process-wide!) decimal context to 500 digits and
        # convert payoff matrix, correlation, noise and population shares
        # to Decimal for a high-precision dynamics computation.
        setcontext(Context(prec=500))
        for x in xrange(len(self.setup.population)):
            for y in xrange(len(self.setup.population)):
                self.payoffMatrix[x, y] = Decimal(repr(self.payoffMatrix[x,y]))
        self.setup.correlation = Decimal(repr(self.setup.correlation))
        self.setup.noise = Decimal(repr(self.setup.noise))
        p = [Decimal(repr(x)) for x in self.setup.population]
        self.setup.population = tuple(p)
        ## end decimals
        df = Dynamics.GenDynamicsFunction(self.payoffMatrix,
                                          self.setup.correlation,
                                          self.setup.noise, 2)
        if self.setup.mutators == []:
            self.dynamicsFunction = df
        else:
            # wrap the plain dynamics with the mutation step
            self.dynamicsFunction = lambda p: mutation(df(p), \
                                                  self.setup.mutators)
        ysize = 1.0 / max(1.0, len(self.setup.strategyList)-1)
        self.graph.reset(0, 0.0, self.lastGeneration, ysize)
        self.rangeStack = []
        # assign one pen color per strategy, skipping pure yellow
        # (hardly visible on white background)
        i = 0
        for s in self.setup.strategyList:
            if Colors.colors[i] == (1.0, 1.0, 0.0): i += 1
            self.graph.addPen(str(s), Gfx.Pen(Colors.colors[i], Gfx.MEDIUM),
                              False)
            i += 1
            if i > len(Colors.colors): i = 0
        self.graph.setTitle('Population dynamics of "'+self.setup.name+'"')
        if len(self.setup.strategyList) > 10:
            self.graph.setStyle(captionPen = SMALL_CAPTION_PEN, redraw = False)
        else:
            self.graph.setStyle(captionPen = NORMAL_CAPTION_PEN, redraw = False)
        self.graph.redrawCaption()
        # a simplex diagram is only possible for exactly three strategies
        if len(self.setup.strategyList) == 3:
            self.simplex.setFunction(self.dynamicsFunction)
            self.simplex.setTitle('Simplex diagram of "'+self.setup.name+'"')
            self.simplex.setLabels(str(self.setup.strategyList[0]),
                                   str(self.setup.strategyList[1]),
                                   str(self.setup.strategyList[2]))
        else:
            self.simplex.setFunction(lambda p:p)
            if len(self.setup.strategyList) > 3:
                self.simplex.setTitle("Too many strategies for " + \
                                      "a simplex diagram!")
            else:
                self.simplex.setTitle("Too few strategies for "\
                                      "a simplex diagram!")
            self.simplex.setLabels("","","")
        self._prepareEvLog()
    def continueSim(self, record = None):
        """Run the population dynamics from firstGeneration up to
        lastGeneration, update graph and html log with the resulting
        ranking, and double the generation range for the next call.

        record -- optional list; if given, the population tuple of every
                  generation is appended to it
        """
        if self.setup == None: return
        self.notifier.statusBarHint("Running...")
        if self.firstGeneration > 1:
            self.graph.adjustRange(0, 0.0, self.lastGeneration, self.graph.y2)
        else:
            # first cycle: plot the initial population at generation 0
            k = 0
            for s in self.setup.strategyList:
                self.graph.addValue(str(s), 0, float(self.setup.population[k]))
                k += 1
        #self.simplex.show()
        p = self.setup.population
        # pixelSteps throttles plotting to one point per screen pixel
        pixelSteps = xaxisIter(self.graph, self.firstGeneration,
                               self.lastGeneration)
        for i in xrange(self.firstGeneration, self.lastGeneration+1):
            p = self.dynamicsFunction(p)
            if pixelSteps.check(i):
                k = 0
                for s in self.setup.strategyList:
                    self.graph.addValue(str(s), i, float(p[k]))
                    k += 1
            if record != None: record.append(p)
        self.setup.population = p
        self._prepareEvLog()
        anchor = "generation%i" % self.lastGeneration
        linkstr = ' <a href="#'+anchor+'">'+ \
                  'Ranking after %i generations</a>' % \
                  self.lastGeneration + "<br />\n"
        self.log.appendAt("toc", linkstr)
        # NOTE(review): re.sub(" ", "", ...) strips ALL spaces from the link,
        # including those inside the link text — confirm this is intended.
        self.log.appendAt("evranking", re.sub(" ","",linkstr))
        self.log.append("\n"+H3+'<a name="'+anchor+'"></a>' +\
                        "Ranking after %i generations:" % \
                        self.lastGeneration + H3X + "<br />\n\n<p><pre>")
        # sort strategies by their final population share, best first
        ranking = zip(self.setup.population,
            #Dynamics._QuickFitness2(self.setup.population, self.payoffMatrix),
            [str(s) for s in self.setup.strategyList])
        ranking.sort(); ranking.reverse()
        k = 1
        for r, name in ranking:
            s = "%3i." % k + name + " "*max(40-len(name),1) + \
                "%1.4f " % r + "\n"
            self.log.append(s)
            k += 1
        self.log.append("</pre><br />\n")
        imgName = self.setup.fname() + "_gn%i" % self.lastGeneration
        path = self.imgdirName + "/" + imgName
        self.log.append('<div align="center">'+\
                        '<a href="' + path + '.png">' + \
                        '<img src="'+path+'_web.png"'+'" alt="Image: ' + \
                        imgName + '.png not found!" /></a></div><br />\n')
        self.log.append("</p>\n")
        self.log.append('<div align="right"><a href="#top">[top]' + \
                        '</a></div><br />\n')
        # remember the graph range belonging to this cycle's image
        self.rangeStack.append((imgName, self.graph.x1, self.graph.y1,
                                self.graph.x2, min(self.graph.y2, 1.0)))
        self.notifier.updateLog(self.log.getHTMLPage())
        if self.firstGeneration <= 1: self.notifier.logToStart()
        # double the simulated range for the next call
        self.firstGeneration = self.lastGeneration + 1
        self.lastGeneration = self.lastGeneration * 2
        self.notifier.statusBarHint("Ready.")
2ab56a94f77d9485e44199c70332478423553a6b | 3,638 | py | Python | setup.py | edaa-org/pyEDAA.OutputFilter | ca602c9992b40df7bd117968c0dc333a4f16d255 | [
"Apache-2.0"
] | 1 | 2021-12-30T02:49:43.000Z | 2021-12-30T02:49:43.000Z | setup.py | edaa-org/pyEDAA.OutputFilter | ca602c9992b40df7bd117968c0dc333a4f16d255 | [
"Apache-2.0"
] | null | null | null | setup.py | edaa-org/pyEDAA.OutputFilter | ca602c9992b40df7bd117968c0dc333a4f16d255 | [
"Apache-2.0"
] | null | null | null | # =============================================================================
# _____ ____ _ _
# _ __ _ _| ____| _ \ / \ / \
# | '_ \| | | | _| | | | |/ _ \ / _ \
# | |_) | |_| | |___| |_| / ___ \ / ___ \
# | .__/ \__, |_____|____/_/ \_\/_/ \_\
# |_| |___/
# =============================================================================
# Authors: Patrick Lehmann
#
# Package installer: Post-processing of EDA Tool outputs.
#
# License:
# ============================================================================
# Copyright 2017-2021 Patrick Lehmann - Boetzingen, Germany
# Copyright 2016-2017 Patrick Lehmann - Dresden, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
# ============================================================================
#
from pathlib import Path
from setuptools import setup as setuptools_setup, find_namespace_packages as setuptools_find_namespace_packages
# Assemble and register the package metadata for 'pyEDAA.OutputFilter'.
githubOrganization =    "edaa-org"
baseProjectName =       "OutputFilter"
fullPackageName =       "pyEDAA." + baseProjectName
packageVersion =        "0.1.0"
# Pull the README in as the long description shown on PyPI.
readmePath = Path("README.md")
with readmePath.open("r") as readmeHandle:
	longDescription = readmeHandle.read()
# Every line of 'requirements.txt' becomes an install-time dependency.
requirementsPath = Path("requirements.txt")
with requirementsPath.open("r") as requirementsHandle:
	installRequirements = requirementsHandle.readlines()
# Derive the project's canonical URLs from organization and project name.
repositoryURL = f"https://github.com/{githubOrganization}/{baseProjectName}"
docsURL = f"https://{githubOrganization}.github.io/{baseProjectName}"
# Hand the collected metadata over to setuptools.
setuptools_setup(
	name=fullPackageName,
	version=packageVersion,
	author="Patrick Lehmann",
	author_email="Paebbels@gmail.com",
	license='Apache 2.0',
	description="Post-processing of EDA Tool outputs.",
	long_description=longDescription,
	long_description_content_type="text/markdown",
	url=repositoryURL,
	project_urls={
		'Documentation': docsURL,
		'Source Code': repositoryURL,
		'Issue Tracker': repositoryURL + "/issues"
	},
	packages=setuptools_find_namespace_packages(exclude=["tests", "tests.*",]),
	classifiers=[
		"License :: OSI Approved :: Apache Software License",
		"Operating System :: OS Independent",
		"Programming Language :: Python :: 3 :: Only",
		"Programming Language :: Python :: 3.6",
		"Programming Language :: Python :: 3.7",
		"Programming Language :: Python :: 3.8",
		"Programming Language :: Python :: 3.9",
		"Development Status :: 4 - Beta",
		"Intended Audience :: Developers",
		"Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)",
		"Topic :: Utilities"
	],
	keywords="Python3 CLI Output Filter PostProcessing",
	python_requires='>=3.6',
	install_requires=installRequirements,
)
| 37.122449 | 125 | 0.644035 |
1f94b165ac1e7f09bc90473e2b3a03319b2cc8d4 | 15,723 | py | Python | src/live_EEG_CNN_180403.py | gumpy-bci/gumpy-online-hybrid-bci | 2a773edba9c5053f77eb5d631f3459cbaadd5b9b | [
"MIT"
] | 3 | 2019-01-12T02:50:40.000Z | 2019-07-20T07:31:29.000Z | src/live_EEG_CNN_180403.py | gumpy-bci/gumpy-online-hybrid-bci | 2a773edba9c5053f77eb5d631f3459cbaadd5b9b | [
"MIT"
] | 1 | 2018-08-13T19:13:18.000Z | 2018-08-13T19:13:18.000Z | src/live_EEG_CNN_180403.py | gumpy-bci/gumpy-online-hybrid-bci | 2a773edba9c5053f77eb5d631f3459cbaadd5b9b | [
"MIT"
] | 3 | 2018-08-10T14:20:29.000Z | 2021-05-25T02:02:34.000Z | # Jonas Braun, jonas.braun@tum.de
# MSNE Research Internship Hybrid BCI
# 03.03´4.2018
# class used for live EEG with a CNN for classification based on CNN-py by Sai Lam Loo
from __future__ import print_function
import sys
sys.path.append('..\..')
import numpy as np
import gumpy
from gumpy.data.nst_eeg_live import NST_EEG_LIVE
#import scipy.io
from scipy.signal import decimate #,butter, lfilter, spectrogram
#import matplotlib.pyplot as plt
import keras
#from keras.utils import plot_model
#from sklearn.model_selection import train_test_split
#from keras.preprocessing import sequence
from keras.models import Sequential, load_model, model_from_json
from keras.layers import Dense, Activation, Flatten, BatchNormalization, Dropout, Conv2D, MaxPooling2D #,LSTM
import keras.utils as ku
from keras.callbacks import ModelCheckpoint, CSVLogger
import kapre
from kapre.time_frequency import Spectrogram
from kapre.utils import Normalization2D
#from kapre.augmentation import AdditiveNoise
from datetime import datetime
import os
import os.path
DEBUG = 1
def check_model(model):
    """Smoke-test a Keras model: print its layer summary, then compile it
    with SGD/MSE and fit a single epoch on random data whose shapes match
    the model's input and output (batch size 2)."""
    model.summary(line_length=80, positions=[.33, .65, .8, 1.])
    in_shape = (2,) + model.input_shape[1:]
    out_shape = (2,) + model.output_shape[1:]
    model.compile('sgd', 'mse')
    dummy_x = np.random.uniform(size=in_shape)
    dummy_y = np.random.uniform(size=out_shape)
    model.fit(dummy_x, dummy_y, epochs=1)
###############################################################################
#def load_model(model_directory, model_file_name, weights_file_name):
# #TODO: does not work, but is not required
# try:
# # load trained model
# model_path = model_file_name + ".json"
# if not os.path.isfile(model_path):
# raise IOError('file "%s" does not exist' % (model_path))
# model = model_from_json(open(model_path).read(),custom_objects={'Spectrogram': kapre.time_frequency.Spectrogram})
#
# # load weights of trained model
# model_weight_path = weights_file_name + ".hdf5"
# if not os.path.isfile(model_path):
# raise OSError('file "%s" does not exist' % (model_path))
# model.load_weights(model_weight_path)
#
# return model
# except IOError:
# print(IOError)
# return None
###############################################################################
class liveEEG_CNN():
    def __init__(self, data_dir, filename_notlive, n_classes = 2):
        """Load the pre-recorded ("not-live") EEG calibration session,
        preprocess it (downsampling, filtering, clipping, normalisation),
        build an augmented training set and set up (load or build) the CNN
        together with the Keras callbacks used for checkpointing/logging.

        data_dir         -- directory containing the recorded EEG data
        filename_notlive -- file name of the calibration recording
        n_classes        -- number of motor-imagery classes (default: 2)
        """
        # NOTE(review): print_version_info is expected to be defined further
        # down in this class (not visible here) — verify.
        self.print_version_info()
        self.data_dir = data_dir
        self.cwd = os.getcwd()
        self.n_classes = n_classes
        kwargs = {'n_classes': self.n_classes}
        ### initialise dataset
        self.data_notlive = NST_EEG_LIVE(self.data_dir, filename_notlive,**kwargs)
        self.data_notlive.load()
        self.data_notlive.print_stats()
        self.MODELNAME = "CNN_STFT"
        self.x_stacked = np.zeros((1, self.data_notlive.sampling_freq*self.data_notlive.trial_total, 3))
        self.y_stacked = np.zeros((1, self.n_classes))
        # filter/preprocessing parameters
        self.fs = 256             # target sampling frequency (Hz)
        self.lowcut = 2           # bandpass lower cutoff (Hz)
        self.highcut = 60         # bandpass upper cutoff (Hz)
        self.anti_drift = 0.5     # highpass cutoff against slow drift (Hz)
        self.f0 = 50.0 # freq to be removed from signal (Hz) for notch filter
        self.Q = 30.0 # quality factor for notch filter
        # w0 = f0 / (fs / 2)
        self.AXIS = 0
        self.CUTOFF = 50.0
        self.w0 = self.CUTOFF / (self.fs / 2)
        self.dropout = 0.5
        ### reduce sampling frequency to 256
        ### most previous data is at 256 Hz, but no it has to be recorded at 512 Hz due to the combination of EMG and EEG
        ### hence, EEG is downsampled by a factor of 2 here
        if self.data_notlive.sampling_freq > self.fs:
            self.data_notlive.raw_data = decimate(self.data_notlive.raw_data, int(self.data_notlive.sampling_freq/self.fs), axis=0, zero_phase=True)
            self.data_notlive.sampling_freq = self.fs
            # trial start indices are sample indices and must be halved too
            self.data_notlive.trials = np.floor(self.data_notlive.trials /2).astype(int)
        ### filter the data
        self.data_notlive_filt = gumpy.signal.notch(self.data_notlive.raw_data, self.CUTOFF, self.AXIS)
        self.data_notlive_filt = gumpy.signal.butter_highpass(self.data_notlive_filt, self.anti_drift, self.AXIS)
        self.data_notlive_filt = gumpy.signal.butter_bandpass(self.data_notlive_filt, self.lowcut, self.highcut, self.AXIS)
        #self.min_cols = np.min(self.data_notlive_filt, axis=0)
        #self.max_cols = np.max(self.data_notlive_filt, axis=0)
        ### clip and normalise the data
        ### keep normalisation constants for lateron (hence no use of gumpy possible)
        # sigma, mean and std-dev are stored so live data can later be
        # transformed into exactly the same feature space (classify_live)
        self.sigma = np.min(np.std(self.data_notlive_filt, axis=0))
        self.data_notlive_clip = np.clip(self.data_notlive_filt, self.sigma * (-6), self.sigma * 6)
        self.notlive_mean = np.mean(self.data_notlive_clip, axis=0)
        self.notlive_std_dev = np.std(self.data_notlive_clip, axis=0)
        self.data_notlive_clip = (self.data_notlive_clip-self.notlive_mean)/self.notlive_std_dev
        #self.data_notlive_clip = gumpy.signal.normalize(self.data_notlive_clip, 'mean_std')
        ### extract the time within the trials of 10s for each class
        self.class1_mat, self.class2_mat = gumpy.utils.extract_trials_corrJB(self.data_notlive, filtered = self.data_notlive_clip)#, self.data_notlive.trials,
        #self.data_notlive.labels, self.data_notlive.trial_total, self.fs)#, nbClasses=self.n_classes)
        #TODO: correct function extract_trials() trial len & trial offset
        ### concatenate data for training and create labels
        self.x_train = np.concatenate((self.class1_mat, self.class2_mat))
        self.labels_c1 = np.zeros((self.class1_mat.shape[0],))
        self.labels_c2 = np.ones((self.class2_mat.shape[0],))
        self.y_train = np.concatenate((self.labels_c1, self.labels_c2))
        ### for categorical crossentropy as an output of the CNN, another format of y is required
        self.y_train = ku.to_categorical(self.y_train)
        if DEBUG:
            print("Shape of x_train: ", self.x_train.shape)
            print("Shape of y_train: ", self.y_train.shape)
            print("EEG Data loaded and processed successfully!")
        ### roll shape to match to the CNN
        self.x_rolled = np.rollaxis(self.x_train, 2, 1)
        if DEBUG:
            print('X shape: ', self.x_train.shape)
            print('X rolled shape: ', self.x_rolled.shape)
        ### augment data to have more samples for training
        self.x_augmented, self.y_augmented = gumpy.signal.sliding_window(data=self.x_train,
            labels=self.y_train, window_sz=4*self.fs, n_hop=self.fs//8, n_start=self.fs*3)
        ### roll shape to match to the CNN
        self.x_augmented_rolled = np.rollaxis(self.x_augmented, 2, 1)
        print("Shape of x_augmented: ", self.x_augmented_rolled.shape)
        print("Shape of y_augmented: ", self.y_augmented.shape)
        ### try to load the .json model file, otherwise build a new model
        self.loaded = 0
        if os.path.isfile(os.path.join(self.cwd,self.MODELNAME+".json")):
            self.load_CNN_model()
            if self.model:
                self.loaded = 1
        if self.loaded == 0:
            print("Could not load model, will build model.")
            self.build_CNN_model()
            if self.model:
                self.loaded = 1
        ### Create callbacks for saving
        # find a run-numbered file name that does not collide with the
        # csv logs of previous runs
        saved_model_name = self.MODELNAME
        TMP_NAME = self.MODELNAME + "_" + "_C" + str(self.n_classes)
        for i in range(99):
            if os.path.isfile(saved_model_name + ".csv"):
                saved_model_name = TMP_NAME + "_run{0}".format(i)
        ### Save model -> json file
        json_string = self.model.to_json()
        model_file = saved_model_name + ".json"
        open(model_file, 'w').write(json_string)
        ### define where to save the parameters to
        model_file = saved_model_name + 'monitoring' + '.h5'
        checkpoint = ModelCheckpoint(model_file, monitor='val_loss',
                                     verbose=1, save_best_only=True, mode='min')
        log_file = saved_model_name + '.csv'
        csv_logger = CSVLogger(log_file, append=True, separator=';')
        self.callbacks_list = [csv_logger, checkpoint] # callback list
###############################################################################
### train the model with the notlive data or sinmply load a pretrained model
    def fit(self, load=False):
        """Compile the CNN and either train it on the augmented not-live
        data (load=False) or restore a pre-trained model from the
        hard-coded file 'CNN_STFTmonitoring.h5' (load=True)."""
        #TODO: use method train_on_batch() to update model
        self.batch_size = 32
        # categorical crossentropy matches the one-hot labels produced by
        # to_categorical() in __init__
        self.model.compile(loss='categorical_crossentropy',
                           optimizer='adam',
                           metrics=['accuracy'])
        if not load:
            print('Train...')
            # 20% of the augmented windows are held out for validation; the
            # callbacks (csv logger + checkpoint) were prepared in __init__
            self.model.fit(self.x_augmented_rolled, self.y_augmented,
                           batch_size=self.batch_size,
                           epochs=100,
                           shuffle=True,
                           validation_split=0.2,
                           callbacks=self.callbacks_list)
        else:
            print('Load...')
            # custom_objects is needed because the saved model contains
            # kapre layers unknown to plain Keras
            self.model = keras.models.load_model('CNN_STFTmonitoring.h5',
                             custom_objects={'Spectrogram': kapre.time_frequency.Spectrogram,
                                             'Normalization2D': kapre.utils.Normalization2D})
            #CNN_STFT__C2_run4monitoring.h5
###############################################################################
### do the live classification
    def classify_live(self, data_live):
        """Preprocess one live trial with the statistics gathered from the
        not-live calibration data and classify it with the CNN.

        data_live -- NST_EEG_LIVE dataset object holding the live trial

        Returns (y_pred, pred_true, pred_valid):
        y_pred     -- predicted class index (argmax of the network output),
                      or [] if no prediction was possible
        pred_true  -- whether the prediction matches the recorded label
        pred_valid -- 1 if a prediction was made, 0 otherwise
        """
        ### perform the same preprocessing steps as in __init__()
        ### again, downsampling from 512 to 256 (see above)
        if data_live.sampling_freq > self.fs:
            data_live.raw_data = decimate(data_live.raw_data, int(self.data_notlive.sampling_freq/self.fs), axis=0, zero_phase=True)
            data_live.sampling_freq = self.fs
        self.y_live=data_live.labels
        # NOTE(review): __init__ passes .raw_data to gumpy.signal.notch, but
        # here the dataset object itself is passed — confirm gumpy accepts
        # both, otherwise this is a bug.
        self.data_live_filt = gumpy.signal.notch(data_live, self.CUTOFF, self.AXIS)
        self.data_live_filt = gumpy.signal.butter_highpass(self.data_live_filt, self.anti_drift, self.AXIS)
        self.data_live_filt = gumpy.signal.butter_bandpass(self.data_live_filt, self.lowcut, self.highcut, self.AXIS)
        # clip/normalise with sigma, mean and std-dev of the NOT-live data so
        # the live trial lives in the same feature space as the training data
        self.data_live_clip = np.clip(self.data_live_filt, self.sigma * (-6), self.sigma * 6)
        self.data_live_clip = (self.data_live_clip-self.notlive_mean)/self.notlive_std_dev
        class1_mat, class2_mat = gumpy.utils.extract_trials_corrJB(data_live, filtered=self.data_live_clip)
        ### concatenate data and create labels
        self.x_live = np.concatenate((class1_mat, class2_mat))
        # cut out the motor-imagery interval of the trial
        self.x_live = self.x_live[:,
                      data_live.mi_interval[0]*data_live.sampling_freq\
                      :data_live.mi_interval[1]*data_live.sampling_freq, :]
        self.x_live = np.rollaxis(self.x_live, 2, 1)
        ### do the prediction
        pred_valid = 0
        y_pred = []
        pred_true = []
        if self.loaded and self.x_live.any():
            y_pred = self.model.predict(self.x_live,batch_size=64)
            print(y_pred)
            #classes = self.model.predict(self.x_live_augmented,batch_size=64)
            #pref0 = sum(classes[:,0])
            #pref1 = sum(classes[:,1])
            #if pref1 > pref0:
            #    y_pred = 1
            #else:
            #    y_pred = 0
            ### argmax because output is crossentropy
            y_pred = y_pred.argmax()
            pred_true = self.y_live == y_pred
            print('Real=',self.y_live)
            pred_valid = 1
        return y_pred, pred_true, pred_valid
###############################################################################
def load_CNN_model(self):
    """Load the CNN architecture for ``self.MODELNAME`` from its JSON file.

    Reads ``<MODELNAME>.json`` and rebuilds the (untrained) Keras model,
    registering the custom kapre layers used by the architecture.

    :raises IOError: if ``<MODELNAME>.json`` does not exist.
    """
    print('Load model', self.MODELNAME)
    model_path = self.MODELNAME + ".json"
    if not os.path.isfile(model_path):
        raise IOError('file "%s" does not exist' % (model_path))
    # Use a context manager so the file handle is closed deterministically
    # (the original open(...).read() leaked the handle until GC).
    with open(model_path) as f:
        architecture = f.read()
    self.model = model_from_json(
        architecture,
        custom_objects={'Spectrogram': kapre.time_frequency.Spectrogram,
                        'Normalization2D': kapre.utils.Normalization2D})
    #self.model = load_model(self.cwd,self.MODELNAME,self.MODELNAME+'monitoring')
    #TODO: get it to work, but not urgently required
    #self.model = []
###############################################################################
def build_CNN_model(self):
    """Define and compile-ready the CNN architecture (STFT front-end).

    The network computes a spectrogram of the raw EEG on the fly (kapre
    ``Spectrogram`` layer), normalises it, then applies three
    Conv/BatchNorm/MaxPool/ReLU/Dropout blocks and a softmax classifier
    over ``self.n_classes`` outputs. Stores the model in ``self.model``.

    NOTE(review): ``border_mode`` is the pre-Keras-2 spelling of
    ``padding`` — this code presumably targets an old Keras version;
    confirm before upgrading the dependency.
    """
    ### define CNN architecture
    print('Build model...')
    self.model = Sequential()
    # Trainable=False keeps the STFT kernel fixed (plain spectrogram).
    self.model.add(Spectrogram(n_dft=128, n_hop=16, input_shape=(self.x_augmented_rolled.shape[1:]),
                               return_decibel_spectrogram=False, power_spectrogram=2.0,
                               trainable_kernel=False, name='static_stft'))
    self.model.add(Normalization2D(str_axis = 'freq'))
    # Conv Block 1
    self.model.add(Conv2D(filters = 24, kernel_size = (12, 12),
                          strides = (1, 1), name = 'conv1',
                          border_mode = 'same'))
    self.model.add(BatchNormalization(axis = 1))
    self.model.add(MaxPooling2D(pool_size = (2, 2), strides = (2,2), padding = 'valid',
                                data_format = 'channels_last'))
    self.model.add(Activation('relu'))
    self.model.add(Dropout(self.dropout))
    # Conv Block 2
    self.model.add(Conv2D(filters = 48, kernel_size = (8, 8),
                          name = 'conv2', border_mode = 'same'))
    self.model.add(BatchNormalization(axis = 1))
    self.model.add(MaxPooling2D(pool_size = (2, 2), strides = (2, 2), padding = 'valid',
                                data_format = 'channels_last'))
    self.model.add(Activation('relu'))
    self.model.add(Dropout(self.dropout))
    # Conv Block 3
    self.model.add(Conv2D(filters = 96, kernel_size = (4, 4),
                          name = 'conv3', border_mode = 'same'))
    self.model.add(BatchNormalization(axis = 1))
    self.model.add(MaxPooling2D(pool_size = (2, 2), strides = (2,2),
                                padding = 'valid',
                                data_format = 'channels_last'))
    self.model.add(Activation('relu'))
    self.model.add(Dropout(self.dropout))
    # classificator
    self.model.add(Flatten())
    self.model.add(Dense(self.n_classes)) # two classes only
    self.model.add(Activation('softmax'))
    print(self.model.summary())
    self.saved_model_name = self.MODELNAME
###############################################################################
def print_version_info(self):
    """Print today's date plus Keras, backend, dim-ordering and Kapre versions."""
    today = datetime.now()
    print('%s/%s/%s' % (today.year, today.month, today.day))
    print('Keras version: {}'.format(keras.__version__))
    # Import only the backend that is actually configured.
    if keras.backend._BACKEND == 'tensorflow':
        import tensorflow
        backend_version = tensorflow.__version__
    else:
        import theano
        backend_version = theano.__version__
    print('Keras backend: {}: {}'.format(keras.backend._backend, backend_version))
    print('Keras image dim ordering: {}'.format(keras.backend.image_dim_ordering()))
    print('Kapre version: {}'.format(kapre.__version__))
| 43.675 | 159 | 0.592953 |
91a335832bcc23221e9e0daab27f4cf9deb7da8d | 21,804 | py | Python | tornado/platform/twisted.py | LumbaJack/tornado | fbe23251e87c9984a370518fd7ccf1ca0bb56df1 | [
"Apache-2.0"
] | 1 | 2016-04-27T12:00:48.000Z | 2016-04-27T12:00:48.000Z | tornado/platform/twisted.py | LumbaJack/Ordered-Tornado | fbe23251e87c9984a370518fd7ccf1ca0bb56df1 | [
"Apache-2.0"
] | null | null | null | tornado/platform/twisted.py | LumbaJack/Ordered-Tornado | fbe23251e87c9984a370518fd7ccf1ca0bb56df1 | [
"Apache-2.0"
] | null | null | null | # Author: Ovidiu Predescu
# Date: July 2011
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Bridges between the Twisted reactor and Tornado IOLoop.
This module lets you run applications and libraries written for
Twisted in a Tornado application. It can be used in two modes,
depending on which library's underlying event loop you want to use.
This module has been tested with Twisted versions 11.0.0 and newer.
"""
from __future__ import absolute_import, division, print_function, with_statement
import datetime
import functools
import numbers
import socket
import sys
import twisted.internet.abstract # type: ignore
from twisted.internet.defer import Deferred # type: ignore
from twisted.internet.posixbase import PosixReactorBase # type: ignore
from twisted.internet.interfaces import IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor # type: ignore
from twisted.python import failure, log # type: ignore
from twisted.internet import error # type: ignore
import twisted.names.cache # type: ignore
import twisted.names.client # type: ignore
import twisted.names.hosts # type: ignore
import twisted.names.resolve # type: ignore
from zope.interface import implementer # type: ignore
from tornado.concurrent import Future
from tornado.escape import utf8
from tornado import gen
import tornado.ioloop
from tornado.log import app_log
from tornado.netutil import Resolver
from tornado.stack_context import NullContext, wrap
from tornado.ioloop import IOLoop
from tornado.util import timedelta_to_seconds
@implementer(IDelayedCall)
class TornadoDelayedCall(object):
"""DelayedCall object for Tornado."""
def __init__(self, reactor, seconds, f, *args, **kw):
self._reactor = reactor
self._func = functools.partial(f, *args, **kw)
self._time = self._reactor.seconds() + seconds
self._timeout = self._reactor._io_loop.add_timeout(self._time,
self._called)
self._active = True
def _called(self):
self._active = False
self._reactor._removeDelayedCall(self)
try:
self._func()
except:
app_log.error("_called caught exception", exc_info=True)
def getTime(self):
return self._time
def cancel(self):
self._active = False
self._reactor._io_loop.remove_timeout(self._timeout)
self._reactor._removeDelayedCall(self)
def delay(self, seconds):
self._reactor._io_loop.remove_timeout(self._timeout)
self._time += seconds
self._timeout = self._reactor._io_loop.add_timeout(self._time,
self._called)
def reset(self, seconds):
self._reactor._io_loop.remove_timeout(self._timeout)
self._time = self._reactor.seconds() + seconds
self._timeout = self._reactor._io_loop.add_timeout(self._time,
self._called)
def active(self):
return self._active
@implementer(IReactorTime, IReactorFDSet)
class TornadoReactor(PosixReactorBase):
    """Twisted reactor built on the Tornado IOLoop.

    `TornadoReactor` implements the Twisted reactor interface on top of
    the Tornado IOLoop. To use it, simply call `install` at the beginning
    of the application::

        import tornado.platform.twisted
        tornado.platform.twisted.install()
        from twisted.internet import reactor

    When the app is ready to start, call ``IOLoop.current().start()``
    instead of ``reactor.run()``.

    It is also possible to create a non-global reactor by calling
    ``tornado.platform.twisted.TornadoReactor(io_loop)``. However, if
    the `.IOLoop` and reactor are to be short-lived (such as those used in
    unit tests), additional cleanup may be required. Specifically, it is
    recommended to call::

        reactor.fireSystemEvent('shutdown')
        reactor.disconnectAll()

    before closing the `.IOLoop`.

    .. versionchanged:: 4.1
       The ``io_loop`` argument is deprecated.
    """
    def __init__(self, io_loop=None):
        if not io_loop:
            io_loop = tornado.ioloop.IOLoop.current()
        self._io_loop = io_loop
        self._readers = {}  # map of reader objects to fd
        self._writers = {}  # map of writer objects to fd
        self._fds = {}  # a map of fd to a (reader, writer) tuple
        self._delayedCalls = {}
        PosixReactorBase.__init__(self)
        self.addSystemEventTrigger('during', 'shutdown', self.crash)

        # IOLoop.start() bypasses some of the reactor initialization.
        # Fire off the necessary events if they weren't already triggered
        # by reactor.run().
        def start_if_necessary():
            if not self._started:
                self.fireSystemEvent('startup')
        self._io_loop.add_callback(start_if_necessary)

    # IReactorTime
    def seconds(self):
        # Reactor clock == the IOLoop's clock.
        return self._io_loop.time()

    def callLater(self, seconds, f, *args, **kw):
        """Schedule ``f`` after ``seconds``; returns an ``IDelayedCall``."""
        dc = TornadoDelayedCall(self, seconds, f, *args, **kw)
        self._delayedCalls[dc] = True
        return dc

    def getDelayedCalls(self):
        # Only calls that have not yet fired or been cancelled.
        return [x for x in self._delayedCalls if x._active]

    def _removeDelayedCall(self, dc):
        if dc in self._delayedCalls:
            del self._delayedCalls[dc]

    # IReactorThreads
    def callFromThread(self, f, *args, **kw):
        """Thread-safe: run ``f`` on the reactor (IOLoop) thread."""
        assert callable(f), "%s is not callable" % f
        with NullContext():
            # This NullContext is mainly for an edge case when running
            # TwistedIOLoop on top of a TornadoReactor.
            # TwistedIOLoop.add_callback uses reactor.callFromThread and
            # should not pick up additional StackContexts along the way.
            self._io_loop.add_callback(f, *args, **kw)

    # We don't need the waker code from the super class, Tornado uses
    # its own waker.
    def installWaker(self):
        pass

    def wakeUp(self):
        pass

    # IReactorFDSet
    def _invoke_callback(self, fd, events):
        """IOLoop handler: dispatch events to the Twisted reader/writer for fd."""
        if fd not in self._fds:
            return
        (reader, writer) = self._fds[fd]
        if reader:
            err = None
            if reader.fileno() == -1:
                # Descriptor already closed on the Twisted side.
                err = error.ConnectionLost()
            elif events & IOLoop.READ:
                err = log.callWithLogger(reader, reader.doRead)
            if err is None and events & IOLoop.ERROR:
                err = error.ConnectionLost()
            if err is not None:
                self.removeReader(reader)
                reader.readConnectionLost(failure.Failure(err))
        if writer:
            err = None
            if writer.fileno() == -1:
                err = error.ConnectionLost()
            elif events & IOLoop.WRITE:
                err = log.callWithLogger(writer, writer.doWrite)
            if err is None and events & IOLoop.ERROR:
                err = error.ConnectionLost()
            if err is not None:
                self.removeWriter(writer)
                writer.writeConnectionLost(failure.Failure(err))

    def addReader(self, reader):
        """Register ``reader`` for READ events (idempotent)."""
        if reader in self._readers:
            # Don't add the reader if it's already there
            return
        fd = reader.fileno()
        self._readers[reader] = fd
        if fd in self._fds:
            (_, writer) = self._fds[fd]
            self._fds[fd] = (reader, writer)
            if writer:
                # We already registered this fd for write events,
                # update it for read events as well.
                self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE)
        else:
            with NullContext():
                self._fds[fd] = (reader, None)
                self._io_loop.add_handler(fd, self._invoke_callback,
                                          IOLoop.READ)

    def addWriter(self, writer):
        """Register ``writer`` for WRITE events (idempotent)."""
        if writer in self._writers:
            return
        fd = writer.fileno()
        self._writers[writer] = fd
        if fd in self._fds:
            (reader, _) = self._fds[fd]
            self._fds[fd] = (reader, writer)
            if reader:
                # We already registered this fd for read events,
                # update it for write events as well.
                self._io_loop.update_handler(fd, IOLoop.READ | IOLoop.WRITE)
        else:
            with NullContext():
                self._fds[fd] = (None, writer)
                self._io_loop.add_handler(fd, self._invoke_callback,
                                          IOLoop.WRITE)

    def removeReader(self, reader):
        """Unregister ``reader``; keeps the fd handler if a writer remains."""
        if reader in self._readers:
            fd = self._readers.pop(reader)
            (_, writer) = self._fds[fd]
            if writer:
                # We have a writer so we need to update the IOLoop for
                # write events only.
                self._fds[fd] = (None, writer)
                self._io_loop.update_handler(fd, IOLoop.WRITE)
            else:
                # Since we have no writer registered, we remove the
                # entry from _fds and unregister the handler from the
                # IOLoop
                del self._fds[fd]
                self._io_loop.remove_handler(fd)

    def removeWriter(self, writer):
        """Unregister ``writer``; keeps the fd handler if a reader remains."""
        if writer in self._writers:
            fd = self._writers.pop(writer)
            (reader, _) = self._fds[fd]
            if reader:
                # We have a reader so we need to update the IOLoop for
                # read events only.
                self._fds[fd] = (reader, None)
                self._io_loop.update_handler(fd, IOLoop.READ)
            else:
                # Since we have no reader registered, we remove the
                # entry from the _fds and unregister the handler from
                # the IOLoop.
                del self._fds[fd]
                self._io_loop.remove_handler(fd)

    def removeAll(self):
        return self._removeAll(self._readers, self._writers)

    def getReaders(self):
        return self._readers.keys()

    def getWriters(self):
        return self._writers.keys()

    # The following functions are mainly used in twisted-style test cases;
    # it is expected that most users of the TornadoReactor will call
    # IOLoop.start() instead of Reactor.run().
    def stop(self):
        PosixReactorBase.stop(self)
        fire_shutdown = functools.partial(self.fireSystemEvent, "shutdown")
        self._io_loop.add_callback(fire_shutdown)

    def crash(self):
        PosixReactorBase.crash(self)
        self._io_loop.stop()

    def doIteration(self, delay):
        # Not used: see mainLoop() below.
        raise NotImplementedError("doIteration")

    def mainLoop(self):
        # Since this class is intended to be used in applications
        # where the top-level event loop is ``io_loop.start()`` rather
        # than ``reactor.run()``, it is implemented a little
        # differently than other Twisted reactors. We override
        # ``mainLoop`` instead of ``doIteration`` and must implement
        # timed call functionality on top of `.IOLoop.add_timeout`
        # rather than using the implementation in
        # ``PosixReactorBase``.
        self._io_loop.start()
class _TestReactor(TornadoReactor):
    """Subclass of TornadoReactor for use in unittests.

    This can't go in the test.py file because of import-order dependencies
    with the Twisted reactor test builder.
    """
    def __init__(self):
        # Every test reactor gets its own, fresh IOLoop.
        super(_TestReactor, self).__init__(IOLoop())

    def listenTCP(self, port, factory, backlog=50, interface=''):
        # Default to localhost to avoid firewall prompts on the mac.
        bind_to = interface or '127.0.0.1'
        return super(_TestReactor, self).listenTCP(
            port, factory, backlog=backlog, interface=bind_to)

    def listenUDP(self, port, protocol, interface='', maxPacketSize=8192):
        bind_to = interface or '127.0.0.1'
        return super(_TestReactor, self).listenUDP(
            port, protocol, interface=bind_to, maxPacketSize=maxPacketSize)
def install(io_loop=None):
    """Install this package as the default Twisted reactor.

    ``install()`` must be called very early in the startup process,
    before most other twisted-related imports. Conversely, because it
    initializes the `.IOLoop`, it cannot be called before
    `.fork_processes` or multi-process `~.TCPServer.start`. These
    conflicting requirements make it difficult to use `.TornadoReactor`
    in multi-process mode, and an external process manager such as
    ``supervisord`` is recommended instead.

    .. versionchanged:: 4.1
       The ``io_loop`` argument is deprecated.
    """
    io_loop = io_loop or tornado.ioloop.IOLoop.current()
    reactor = TornadoReactor(io_loop)
    # Imported lazily: twisted.internet.main must not be pulled in before
    # install() is called.
    from twisted.internet.main import installReactor  # type: ignore
    installReactor(reactor)
    return reactor
@implementer(IReadDescriptor, IWriteDescriptor)
class _FD(object):
    """Adapter exposing a (fd, fileobj, handler) triple to Twisted.

    Forwards Twisted's read/write/lost notifications to a Tornado-style
    ``handler(fileobj, events)`` callback; ``lost`` suppresses any
    callbacks after the connection has gone away.
    """
    def __init__(self, fd, fileobj, handler):
        self.fd = fd
        self.fileobj = fileobj
        self.handler = handler
        self.reading = False
        self.writing = False
        self.lost = False

    def fileno(self):
        return self.fd

    def doRead(self):
        if self.lost:
            return
        self.handler(self.fileobj, tornado.ioloop.IOLoop.READ)

    def doWrite(self):
        if self.lost:
            return
        self.handler(self.fileobj, tornado.ioloop.IOLoop.WRITE)

    def connectionLost(self, reason):
        if self.lost:
            return
        self.handler(self.fileobj, tornado.ioloop.IOLoop.ERROR)
        self.lost = True

    def logPrefix(self):
        return ''
class TwistedIOLoop(tornado.ioloop.IOLoop):
    """IOLoop implementation that runs on Twisted.

    `TwistedIOLoop` implements the Tornado IOLoop interface on top of
    the Twisted reactor. Recommended usage::

        from tornado.platform.twisted import TwistedIOLoop
        from twisted.internet import reactor
        TwistedIOLoop().install()
        # Set up your tornado application as usual using `IOLoop.instance`
        reactor.run()

    Uses the global Twisted reactor by default. To create multiple
    ``TwistedIOLoops`` in the same process, you must pass a unique reactor
    when constructing each one.

    Not compatible with `tornado.process.Subprocess.set_exit_callback`
    because the ``SIGCHLD`` handlers used by Tornado and Twisted conflict
    with each other.
    """
    def initialize(self, reactor=None, **kwargs):
        super(TwistedIOLoop, self).initialize(**kwargs)
        if reactor is None:
            import twisted.internet.reactor  # type: ignore
            reactor = twisted.internet.reactor
        self.reactor = reactor
        self.fds = {}  # fd -> _FD wrapper

    def close(self, all_fds=False):
        fds = self.fds
        self.reactor.removeAll()
        for c in self.reactor.getDelayedCalls():
            c.cancel()
        if all_fds:
            for fd in fds.values():
                self.close_fd(fd.fileobj)

    def add_handler(self, fd, handler, events):
        """Register ``handler`` for ``events`` on ``fd`` (must not exist yet)."""
        if fd in self.fds:
            raise ValueError('fd %s added twice' % fd)
        fd, fileobj = self.split_fd(fd)
        self.fds[fd] = _FD(fd, fileobj, wrap(handler))
        if events & tornado.ioloop.IOLoop.READ:
            self.fds[fd].reading = True
            self.reactor.addReader(self.fds[fd])
        if events & tornado.ioloop.IOLoop.WRITE:
            self.fds[fd].writing = True
            self.reactor.addWriter(self.fds[fd])

    def update_handler(self, fd, events):
        """Adjust the reader/writer registrations to match ``events``."""
        fd, fileobj = self.split_fd(fd)
        if events & tornado.ioloop.IOLoop.READ:
            if not self.fds[fd].reading:
                self.fds[fd].reading = True
                self.reactor.addReader(self.fds[fd])
        else:
            if self.fds[fd].reading:
                self.fds[fd].reading = False
                self.reactor.removeReader(self.fds[fd])
        if events & tornado.ioloop.IOLoop.WRITE:
            if not self.fds[fd].writing:
                self.fds[fd].writing = True
                self.reactor.addWriter(self.fds[fd])
        else:
            if self.fds[fd].writing:
                self.fds[fd].writing = False
                self.reactor.removeWriter(self.fds[fd])

    def remove_handler(self, fd):
        """Unregister ``fd``; no-op if it was never added."""
        fd, fileobj = self.split_fd(fd)
        if fd not in self.fds:
            return
        # Mark lost first so in-flight reactor callbacks become no-ops.
        self.fds[fd].lost = True
        if self.fds[fd].reading:
            self.reactor.removeReader(self.fds[fd])
        if self.fds[fd].writing:
            self.reactor.removeWriter(self.fds[fd])
        del self.fds[fd]

    def start(self):
        old_current = IOLoop.current(instance=False)
        try:
            self._setup_logging()
            self.make_current()
            self.reactor.run()
        finally:
            if old_current is None:
                IOLoop.clear_current()
            else:
                old_current.make_current()

    def stop(self):
        self.reactor.crash()

    def add_timeout(self, deadline, callback, *args, **kwargs):
        # This method could be simplified (since tornado 4.0) by
        # overriding call_at instead of add_timeout, but we leave it
        # for now as a test of backwards-compatibility.
        if isinstance(deadline, numbers.Real):
            delay = max(deadline - self.time(), 0)
        elif isinstance(deadline, datetime.timedelta):
            delay = timedelta_to_seconds(deadline)
        else:
            # Fixed: the original raised TypeError("Unsupported deadline %r")
            # without ever interpolating the offending value.
            raise TypeError("Unsupported deadline %r" % (deadline,))
        return self.reactor.callLater(
            delay, self._run_callback,
            functools.partial(wrap(callback), *args, **kwargs))

    def remove_timeout(self, timeout):
        if timeout.active():
            timeout.cancel()

    def add_callback(self, callback, *args, **kwargs):
        self.reactor.callFromThread(
            self._run_callback,
            functools.partial(wrap(callback), *args, **kwargs))

    def add_callback_from_signal(self, callback, *args, **kwargs):
        self.add_callback(callback, *args, **kwargs)
class TwistedResolver(Resolver):
    """Twisted-based asynchronous resolver.

    This is a non-blocking and non-threaded resolver. It is
    recommended only when threads cannot be used, since it has
    limitations compared to the standard ``getaddrinfo``-based
    `~tornado.netutil.Resolver` and
    `~tornado.netutil.ThreadedResolver`. Specifically, it returns at
    most one result, and arguments other than ``host`` and ``family``
    are ignored. It may fail to resolve when ``family`` is not
    ``socket.AF_UNSPEC``.

    Requires Twisted 12.1 or newer.

    .. versionchanged:: 4.1
       The ``io_loop`` argument is deprecated.
    """
    def initialize(self, io_loop=None):
        self.io_loop = io_loop or IOLoop.current()
        # partial copy of twisted.names.client.createResolver, which doesn't
        # allow for a reactor to be passed in.
        self.reactor = tornado.platform.twisted.TornadoReactor(io_loop)
        host_resolver = twisted.names.hosts.Resolver('/etc/hosts')
        cache_resolver = twisted.names.cache.CacheResolver(reactor=self.reactor)
        real_resolver = twisted.names.client.Resolver('/etc/resolv.conf',
                                                      reactor=self.reactor)
        # Lookup order: /etc/hosts, then cache, then real DNS.
        self.resolver = twisted.names.resolve.ResolverChain(
            [host_resolver, cache_resolver, real_resolver])

    @gen.coroutine
    def resolve(self, host, port, family=0):
        """Resolve ``host`` to at most one ``(family, (addr, port))`` pair."""
        # getHostByName doesn't accept IP addresses, so if the input
        # looks like an IP address just return it immediately.
        if twisted.internet.abstract.isIPAddress(host):
            resolved = host
            resolved_family = socket.AF_INET
        elif twisted.internet.abstract.isIPv6Address(host):
            resolved = host
            resolved_family = socket.AF_INET6
        else:
            deferred = self.resolver.getHostByName(utf8(host))
            resolved = yield gen.Task(deferred.addBoth)
            if isinstance(resolved, failure.Failure):
                # Re-raise the DNS error in the caller's context.
                resolved.raiseException()
            elif twisted.internet.abstract.isIPAddress(resolved):
                resolved_family = socket.AF_INET
            elif twisted.internet.abstract.isIPv6Address(resolved):
                resolved_family = socket.AF_INET6
            else:
                resolved_family = socket.AF_UNSPEC
        if family != socket.AF_UNSPEC and family != resolved_family:
            raise Exception('Requested socket family %d but got %d' %
                            (family, resolved_family))
        result = [
            (resolved_family, (resolved, port)),
        ]
        raise gen.Return(result)
# Teach tornado.gen to accept Twisted Deferreds in ``yield`` expressions
# (only on tornado versions where convert_yielded is a singledispatch).
if hasattr(gen.convert_yielded, 'register'):
    @gen.convert_yielded.register(Deferred)  # type: ignore
    def _(d):
        """Convert a Twisted ``Deferred`` into a tornado ``Future``."""
        f = Future()

        def errback(failure):
            try:
                failure.raiseException()
                # Should never happen, but just in case
                raise Exception("errback called without error")
            except:
                # Deliberately bare: captures whatever raiseException()
                # raised (any BaseException) into the Future.
                f.set_exc_info(sys.exc_info())
        d.addCallbacks(f.set_result, errback)
        return f
| 37.271795 | 132 | 0.624473 |
8121b9391015d04f246360c17d0f71629f6e52cc | 5,138 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/aio/operations/_express_route_service_providers_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 3 | 2020-06-23T02:25:27.000Z | 2021-09-07T18:48:11.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/aio/operations/_express_route_service_providers_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 510 | 2019-07-17T16:11:19.000Z | 2021-08-02T08:38:32.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/aio/operations/_express_route_service_providers_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 5 | 2019-09-04T12:51:37.000Z | 2020-09-16T07:28:40.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteServiceProvidersOperations:
    """ExpressRouteServiceProvidersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_04_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        **kwargs
    ) -> AsyncIterable["_models.ExpressRouteServiceProviderListResult"]:
        """Gets all the available express route service providers.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRouteServiceProviderListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_04_01.models.ExpressRouteServiceProviderListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteServiceProviderListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-04-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # First page: build the URL from the operation metadata.
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service returns a fully-formed next_link.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('ExpressRouteServiceProviderListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteServiceProviders'}  # type: ignore
a5b1b685157999ae5776007b76ddcffa58a7c9ee | 1,494 | py | Python | tests/materialPoints_xml.py | kinnder/processPlanning | bb50a5e26545d68de88516178435b922e44cf4e8 | [
"MIT"
] | null | null | null | tests/materialPoints_xml.py | kinnder/processPlanning | bb50a5e26545d68de88516178435b922e44cf4e8 | [
"MIT"
] | null | null | null | tests/materialPoints_xml.py | kinnder/processPlanning | bb50a5e26545d68de88516178435b922e44cf4e8 | [
"MIT"
] | null | null | null | from subprocess import check_output
application: str = 'application-0.2.0.jar'
task_domain: str = 'materialPoints'
file_format: str = 'xml'
task_description_file: str = task_domain + '_td.' + file_format
system_transformation_file: str = task_domain + '_st.' + file_format
node_network_file: str = task_domain + '_nn.' + file_format
process_file: str = task_domain + '_p.' + file_format
print(task_domain + ' ' + file_format + ' started')
check_output(['java',
'-jar', application,
'-new_td',
'-d', task_domain,
'-td', task_description_file])
check_output(['java',
'-jar', application,
'-new_st',
'-d', task_domain,
'-st', system_transformation_file])
check_output(['java',
'-jar', application,
'-plan',
'-td', task_description_file,
'-st', system_transformation_file,
'-nn', node_network_file,
'-p', process_file])
check_output(['java',
'-jar', application,
'-verify',
'-td', task_description_file,
'-st', system_transformation_file,
'-nn', node_network_file,
'-p', process_file])
with open(task_description_file):
pass
with open(system_transformation_file):
pass
with open(node_network_file):
pass
with open(process_file):
pass
print(task_domain + ' ' + file_format + ' completed')
| 26.210526 | 68 | 0.583668 |
cbca641476202134ffb62a43d51ca43b1b56bb8b | 5,069 | py | Python | controllers/designer.py | rhullcsa/RunestoneServer | db1ad6078f855a058e735bdbc6c1f130f1a34ee7 | [
"MIT"
] | null | null | null | controllers/designer.py | rhullcsa/RunestoneServer | db1ad6078f855a058e735bdbc6c1f130f1a34ee7 | [
"MIT"
] | null | null | null | controllers/designer.py | rhullcsa/RunestoneServer | db1ad6078f855a058e735bdbc6c1f130f1a34ee7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
from os import path
import random
import datetime
import logging
logger = logging.getLogger(settings.logger)
logger.setLevel(settings.log_level)
admin_logger(logger)
#########################################################################
## This is a samples controller
## - index is the default action of any application
## - user is required for authentication and authorization
## - download is for downloading files uploaded in the db (does streaming)
## - call exposes all registered services (none by default)
#########################################################################
@auth.requires_login()
def index():
    # Landing page for the custom-course designer. Returns the template
    # context dict; empty unless the server runs in academy mode.
    basicvalues = {}
    if settings.academy_mode:
        """
        example action using the internationalization operator T and flash
        rendered by views/default/index.html or views/generic.html
        """
        # response.flash = "Welcome to CourseWare Manager!"
        basicvalues["message"] = T("Build a Custom Course")
        basicvalues["descr"] = T(
            """This page allows you to select a book for your own class. You will have access to all student activities in your course.
To begin, enter a project name below."""
        )
    # return dict(message=T('Welcome to CourseWare Manager'))
    return basicvalues
@auth.requires_login()
def build():
    # Create a new course from the designer form: validates the name,
    # optionally grants the instructor role, inserts the course row
    # (copying PreTeXt attributes from the base course if applicable),
    # enrolls the current user as instructor, and renders the result page.
    buildvalues = {}
    if settings.academy_mode:
        buildvalues["pname"] = request.vars.projectname
        buildvalues["pdescr"] = request.vars.projectdescription
        # Reject duplicate course names up front.
        existing_course = (
            db(db.courses.course_name == request.vars.projectname).select().first()
        )
        if existing_course:
            session.flash = (
                f"course name {request.vars.projectname} has already been used"
            )
            redirect(URL("designer", "index"))
        if not request.vars.coursetype:
            session.flash = "You must select a base course."
            redirect(URL("designer", "index"))
        # if make instructor add row to auth_membership
        if "instructor" in request.vars:
            # NOTE(review): gid is a Row (selected id column), not a bare int;
            # web2py appears to accept it for group_id — confirm.
            gid = (
                db(db.auth_group.role == "instructor").select(db.auth_group.id).first()
            )
            db.auth_membership.insert(user_id=auth.user.id, group_id=gid)
        base_course = request.vars.coursetype
        bcdb = db(db.courses.course_name == base_course).select().first()
        if request.vars.startdate == "":
            request.vars.startdate = datetime.date.today()
        else:
            # Assumes US-style mm/dd/yyyy input from the date picker.
            date = request.vars.startdate.split("/")
            request.vars.startdate = datetime.date(
                int(date[2]), int(date[0]), int(date[1])
            )
        if not request.vars.institution:
            institution = "Not Provided"
        else:
            institution = request.vars.institution
        if not request.vars.courselevel:
            courselevel = "unknown"
        else:
            courselevel = request.vars.courselevel
        python3 = "true"
        if not request.vars.loginreq:
            login_required = "false"
        else:
            login_required = "true"
        # TODO: Update new_server after full away from old server
        cid = db.courses.update_or_insert(
            course_name=request.vars.projectname,
            term_start_date=request.vars.startdate,
            institution=institution,
            base_course=base_course,
            login_required=login_required,
            python3=python3,
            courselevel=courselevel,
            new_server=True if settings.running_bookserver else False,
        )
        # PreTeXt-origin base courses carry extra attributes to copy over.
        origin = getCourseOrigin(base_course)
        if origin and origin.value == "PreTeXt":
            origin_attrs = getCourseAttributesDict(bcdb.id)
            for key in origin_attrs:
                db.course_attributes.insert(
                    course_id=cid, attr=key, value=origin_attrs[key]
                )
        if request.vars.invoice:
            db.invoice_request.insert(
                timestamp=datetime.datetime.now(),
                sid=auth.user.username,
                email=auth.user.email,
                course_name=request.vars.projectname,
            )
        # enrol the user in their new course
        db(db.auth_user.id == auth.user.id).update(course_id=cid)
        db.course_instructor.insert(instructor=auth.user.id, course=cid)
        auth.user.update(
            course_name=request.vars.projectname
        )  # also updates session info
        auth.user.update(course_id=cid)
        db.executesql(
            """
            INSERT INTO user_courses(user_id, course_id)
            SELECT %s, %s
            """,
            (auth.user.id, cid),
        )
        session.flash = "Course Created Successfully"
        # redirect(
        #     URL("books", "published", args=[request.vars.projectname, "index.html"])
        # )
        return dict(coursename=request.vars.projectname, basecourse=base_course)
| 35.447552 | 135 | 0.591241 |
2faab18c5e9c205b059c990eaa37293abfbd4acb | 2,100 | py | Python | socialregistration/models.py | leopd/django-socialregistration | cec489cfbd44094a99a8fc00cc5a703fbdc2474a | [
"MIT"
] | 1 | 2020-03-08T19:31:57.000Z | 2020-03-08T19:31:57.000Z | socialregistration/models.py | leopd/django-socialregistration | cec489cfbd44094a99a8fc00cc5a703fbdc2474a | [
"MIT"
] | null | null | null | socialregistration/models.py | leopd/django-socialregistration | cec489cfbd44094a99a8fc00cc5a703fbdc2474a | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
class FacebookProfile(models.Model):
user = models.ForeignKey(User)
site = models.ForeignKey(Site, default=Site.objects.get_current)
uid = models.CharField(max_length=255, blank=False, null=False)
oauth_access_token = models.CharField(max_length=255, blank=False, null=False)
def __unicode__(self):
return u'%s: %s' % (self.user, self.uid)
def authenticate(self):
return authenticate(uid=self.uid)
class TwitterProfile(models.Model):
user = models.ForeignKey(User)
site = models.ForeignKey(Site, default=Site.objects.get_current)
twitter_id = models.PositiveIntegerField()
def __unicode__(self):
return u'%s: %s' % (self.user, self.twitter_id)
def authenticate(self):
return authenticate(twitter_id=self.twitter_id)
class OpenIDProfile(models.Model):
user = models.ForeignKey(User)
site = models.ForeignKey(Site, default=Site.objects.get_current)
identity = models.TextField()
def __unicode__(self):
return u'OpenID Profile for %s, via provider %s' % (self.user, self.identity)
def authenticate(self):
return authenticate(identity=self.identity)
class OpenIDStore(models.Model):
site = models.ForeignKey(Site, default=Site.objects.get_current)
server_url = models.CharField(max_length=255)
handle = models.CharField(max_length=255)
secret = models.TextField()
issued = models.IntegerField()
lifetime = models.IntegerField()
assoc_type = models.TextField()
def __unicode__(self):
return u'OpenID Store %s for %s' % (self.server_url, self.site)
class OpenIDNonce(models.Model):
server_url = models.CharField(max_length=255)
timestamp = models.IntegerField()
salt = models.CharField(max_length=255)
date_created = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return u'OpenID Nonce for %s' % self.server_url
| 34.42623 | 85 | 0.71381 |
26c27af8f3ea9532c0cdeb6fdf2b190a9112ccd6 | 22,174 | py | Python | phovea_server/range.py | phovea/phovea_server | f83879f58669ff4d554efcb727b1c6fd0185041a | [
"BSD-3-Clause"
] | 3 | 2018-06-08T01:28:56.000Z | 2020-01-10T14:17:34.000Z | phovea_server/range.py | phovea/phovea_server | f83879f58669ff4d554efcb727b1c6fd0185041a | [
"BSD-3-Clause"
] | 88 | 2016-11-06T08:28:21.000Z | 2022-03-22T07:18:59.000Z | phovea_server/range.py | phovea/phovea_server | f83879f58669ff4d554efcb727b1c6fd0185041a | [
"BSD-3-Clause"
] | 6 | 2017-06-06T20:43:00.000Z | 2020-02-13T18:23:46.000Z | ###############################################################################
# Caleydo - Visualization for Molecular Biology - http://caleydo.org
# Copyright (c) The Caleydo Team. All rights reserved.
# Licensed under the new BSD license, available at http://caleydo.org/license
###############################################################################
from builtins import str
from builtins import range as number_range
from builtins import object
import itertools
from functools import reduce, cmp_to_key
from numpy import NaN, isnan
all_f = all
def fix(v, size=0):
  """Resolve a possibly negative ("unbound") index against a total size.

  Non-negative values pass through unchanged; a negative v counts backwards
  from size, with -1 mapping to size itself (one past the last index).
  """
  if v >= 0:
    return v
  return size + 1 + v
class SingleRangeElem(object):
  """A range element covering exactly one index (degenerate one-step slice)."""

  def __init__(self, val):
    self.start = val

  @property
  def end(self):
    # exclusive end: directly after the single covered index
    return self.start + 1

  @property
  def step(self):
    return 1

  def asslice(self):
    """A single element indexes directly, no slice object needed."""
    return self.start

  @property
  def isall(self):
    return False

  @property
  def issingle(self):
    return True

  @property
  def isunbound(self):
    return False

  def __len__(self):
    return 1

  @staticmethod
  def size(size=0):
    # a single element always spans exactly one index, whatever the data size
    return 1

  def reverse(self):
    """Reversing a one-element range yields an equal element."""
    return SingleRangeElem(self.start)

  def invert(self, index, size=0):
    """Map a position within this element back to the absolute index."""
    return index + fix(self.start, size)

  def __iter__(self):
    return self.iter()

  def iter(self, size=0):
    yield self.start

  def contains(self, value, size=0):
    return value == fix(self.start, size)

  def __in__(self, value):
    # NOTE(review): not a real dunder; the `in` operator falls back to __iter__
    return self.contains(value)

  def __str__(self):
    return str(self.start)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and other.start == self.start

  def copy(self):
    return self.__copy__()

  def __copy__(self):
    return SingleRangeElem(self.start)
class RangeElem(object):
  """One slice-like piece of a 1D range: [start, end) with a step.

  Negative start/end values are "unbound": they are resolved against a
  concrete size via fix() (-1 meaning "until the end").
  """

  def __init__(self, start, end=-1, step=1):
    self.start = start
    self.end = end
    self.step = step

  @property
  def isall(self):
    """True for the canonical 0:-1:1 element that covers everything."""
    return self.start == 0 and self.end == -1 and self.step == 1

  def asslice(self):
    return slice(self.start, self.end, self.step)

  @property
  def issingle(self):
    # exactly one step fits between start and (exclusive) end
    return (self.start + self.step) == self.end

  @property
  def isunbound(self):
    # negative bounds can only be resolved once a concrete size is known
    return self.start < 0 or self.end < 0

  @staticmethod
  def all():
    return RangeElem(0)

  @staticmethod
  def none():
    return RangeElem(0, 0)

  @staticmethod
  def single(val):
    return SingleRangeElem(val)

  @staticmethod
  def range(start, end=-1, step=1):
    """Create an element, collapsing a one-element span into a SingleRangeElem."""
    if (start + step) == end:
      return RangeElem.single(start)
    return RangeElem(start, end, step)

  def __len__(self):
    return self.size()

  def size(self, size=NaN):
    """Number of indices covered, resolving unbound ends against size.

    With the default size=NaN an unbound element produces NaN-ish results;
    callers that may hold unbound elements must pass a concrete size.
    """
    t = fix(self.end, size)
    f = fix(self.start, size)
    if self.step == 1:
      return max(t - f, 0)
    elif self.step == -1:
      if self.end == -1:
        # counting down to (and including) index 0
        return max(f - -1, 0)
      return max(f - t, 0)
    d = t - f + 1 if self.step > 0 else f - t + 1
    s = abs(self.step)
    if d <= 0:
      return 0
    return (d // s)

  def reverse(self):
    """Return the element walking the same indices in the opposite order.

    BUGFIX: the old implementation had two branches on self.start that were
    byte-identical; collapsed into the single equivalent expression.  Both
    bounds shift by one because end is exclusive and start inclusive.
    """
    return RangeElem(self.end - 1, self.start - 1, -self.step)

  def invert(self, index, size=0):
    """Map a position within this element back to the absolute index."""
    if self.isall:
      return index
    return fix(self.start, size) + index * self.step

  def __iter__(self):
    return self.iter()

  def iter(self, size=0):
    """Iterate the covered absolute indices, resolving bounds against size."""
    if self.step < 0 and self.end == -1:
      # keep negative to have 0 included
      return iter(number_range(fix(self.start, size), -1, self.step))
    return iter(number_range(fix(self.start, size), fix(self.end, size), self.step))

  def contains(self, value, size=NaN):
    if self.isall:
      return True
    f = fix(self.start, size)
    t = fix(self.end, size)
    if self.step == -1:
      if self.end == -1:
        return 0 <= value <= f
      return (value <= f) and (value > t)
    elif self.step == 1:
      return (value >= f) and (value < t)
    else:
      # arbitrary step: fall back to explicit enumeration
      return value in list(self.iter(size))

  def __in__(self, value):
    # NOTE(review): not a real dunder; the `in` operator falls back to __iter__
    return self.contains(value)

  def __str__(self):
    """Serialize as '', 'start', 'start:end' or 'start:end:step'."""
    if self.isall:
      return ''
    if self.issingle:
      return str(self.start)
    r = str(self.start) + ':' + str(self.end)
    if self.step != 1:
      r = r + ':' + str(self.step)
    return r

  def __eq__(self, other):
    if isinstance(other, self.__class__):
      return self.start == other.start and self.end == other.end and self.step == other.step
    return False

  def copy(self):
    return self.__copy__()

  def __copy__(self):
    return RangeElem(self.start, self.end, self.step)

  @staticmethod
  def parse(code):
    """Parse 'start[:end[:step]]' (an empty string means "all")."""
    if len(code) == 0:
      return RangeElem.all()

    def parse_elem(v, default_value=None):
      # empty parts fall back to their positional default (0 / -1 / 1)
      v = v.strip()
      if len(v) == 0 and default_value is not None:
        return default_value
      try:
        return int(v)
      except ValueError:
        raise Exception('parse error: "' + v + '" is not a valid integer')

    parts = code.split(':')
    if len(parts) == 1:
      return RangeElem.single(parse_elem(parts[0]))
    elif len(parts) == 2:
      return RangeElem(parse_elem(parts[0], 0), parse_elem(parts[1], -1))
    elif len(parts) == 3:
      return RangeElem(parse_elem(parts[0], 0), parse_elem(parts[1], -1), parse_elem(parts[2], 1))
    raise Exception('parse error: "' + code + '" is not a valid range specifier')
class Range1D(object):
  """A 1D index range: an ordered list of RangeElem/SingleRangeElem pieces."""

  def __init__(self, arg=None):
    """Wrap a list of elements, share another Range1D's elements, or start empty."""
    if isinstance(arg, list):
      self._elems = arg
    elif isinstance(arg, Range1D):
      self._elems = arg._elems
    else:
      self._elems = []

  def __len__(self):
    # NOTE: may be NaN-ish for unbound ranges; see size()
    return self.size()

  def copy(self):
    return Range1D(self._elems[:])

  def __copy__(self):
    return self.copy()

  def size(self, size=NaN):
    """Total number of covered indices; NaN when unbound and no size is given."""
    if isnan(size) and self.isunbound:
      return NaN
    return reduce(lambda s, x: s + x.size(size), self._elems, 0)

  @staticmethod
  def all():
    return Range1D([RangeElem.all()])

  @staticmethod
  def single(item):
    return Range1D([RangeElem.single(item)])

  @staticmethod
  def none():
    return Range1D()

  @staticmethod
  def from_list(indices):
    """Create a range from an explicit index list, compressing runs to slices."""
    return Range1D(Range1D._compress(indices))

  @staticmethod
  def _compress(indices):
    """Compress an index list into elements, merging +1-step runs into slices."""
    l = len(indices)
    if l == 0:
      return []
    elif l == 1:
      return [RangeElem.single(indices[0])]
    r = []
    deltas = [e - indices[i] for i, e in enumerate(indices[1:])]
    start = 0
    act = 1
    while act < l:
      while act < l and deltas[start] == deltas[act - 1]:  # while the same delta
        act += 1
      if act == start + 1:  # just a single item used
        r.append(RangeElem.single(indices[start]))
      else:
        # +1 since end is excluded
        # fix while just +1 is allowed and -1 is not allowed
        if deltas[start] == 1:
          r.append(RangeElem.range(indices[start], indices[act - 1] + deltas[start], deltas[start]))
        else:
          for i in number_range(start, act):
            r.append(RangeElem.single(indices[i]))
      start = act
      act += 1
    while start < len(indices):  # corner case by adding act+1, it might happened that last one isn't considered
      r.append(RangeElem.single(indices[start]))
      start += 1
    return r

  @property
  def isall(self):
    return len(self._elems) == 1 and self[0].isall

  @property
  def isnone(self):
    return len(self._elems) == 0

  @property
  def isunbound(self):
    return any((d.isunbound for d in self._elems))

  @property
  def _islist(self):
    # true when every element covers exactly one index (no real slices)
    return not any(not d.issingle for d in self._elems)

  def append(self, *args):
    """Append elements given as strings, ints, [start, end, step] lists or elems."""
    def convert(p):
      if isinstance(p, str):
        return RangeElem.parse(p)
      elif isinstance(p, int):
        return RangeElem.single(p)
      elif isinstance(p, list):
        return RangeElem.range(p[0], p[1], p[2])
      return p

    self._elems.extend((convert(p) for p in args))

  def push_slice(self, start, end=-1, step=1):
    self._elems.append(RangeElem(start, end, step))

  def push_list(self, indices):
    self._elems.extend(Range1D._compress(indices))

  def set_slice(self, start, end=-1, step=1):
    """Replace the current content with a single slice."""
    self._elems = []
    self.push_slice(start, end, step)

  def set_list(self, indices):
    """Replace the current content with an explicit index list."""
    self._elems = []
    self.push_list(indices)

  def __getitem__(self, i):
    # negative indices wrap around; out-of-bounds yields an empty element
    if i < 0:
      i += len(self._elems)
    if i < 0 or i >= len(self._elems):
      return RangeElem.none()
    return self._elems[i]

  def asslice(self, no_ellipsis=False):
    """Convert to something usable for indexing: Ellipsis, slice, int or list."""
    if self.isall:
      return slice(0, -1) if no_ellipsis else Ellipsis
    if self.isnone:
      return []
    if len(self._elems) == 1:
      return self._elems[0].asslice()
    return self.tolist()

  @property
  def is_identity_range(self):
    return len(self._elems) == 1 and self._elems[0].start == 0 and self._elems[0].step == 1

  def repeat(self, ntimes=1):
    """Concatenate this range's elements ntimes (returns self for ntimes == 1)."""
    if ntimes == 1:
      return self
    r = []
    for i in number_range(ntimes):
      r.extend(self._elems)
    return Range1D(r)

  def pre_multiply(self, sub, size=0):
    """Compose with sub, treating self as a lookup table: result[i] = self[sub[i]]."""
    if self.isall:
      return sub
    if sub.isall:
      return self
    if self.is_identity_range:  # identity lookup
      return sub
    # TODO optimize
    l = list(self.iter(size))
    # BUGFIX: Python iterators have no hasNext(); iterate sub's indices
    # directly and drop the out-of-range ones.
    r = [l[i] for i in sub.iter(len(l)) if 0 <= i < len(l)]
    return Range1D.from_list(r)

  def union(self, other, size=0):
    """Sorted union of both ranges' indices."""
    if self.isall or other.isnone:
      return self
    if other.isall or self.isnone:
      return other
    r = list(self.iter(size))
    it2 = other.iter(size)
    for i in it2:
      if i not in r:
        r.append(i)
    return Range1D.from_list(sorted(r))

  def intersect(self, other, size=0):
    """Sorted intersection of both ranges' indices."""
    if self.isnone or other.isnone:
      return Range1D.none()
    if self.isall:
      return other
    if other.isall:
      return self
    it1 = list(self.iter(size))
    it2 = other.iter(size)
    r = [i for i in it2 if i in it1]
    return Range1D.from_list(sorted(r))

  def without(self, without, size=0):
    """Sorted set difference: self's indices minus without's indices."""
    if self.isnone or without.isnone:
      return self.copy()
    if without.isall:
      return Range1D.none()
    it1 = self.iter(size)
    it2 = list(without.iter(size))
    r = [i for i in it1 if i not in it2]
    return Range1D.from_list(sorted(r))

  def invert(self, index, size=0):
    """Map a position within this range back to the absolute index (-1 if unmapped)."""
    if self.isall:
      return index
    if self.isnone:
      return -1  # not mapped
    # BUGFIX: walk the elements accumulating their sizes until the element
    # containing `index` is found; the previous loop advanced past the end
    # of self._elems and raised IndexError.
    total = 0
    for elem in self._elems:
      s = elem.size(size)
      if total + s > index:
        return elem.invert(index - total, size)
      total += s
    return -1  # not mapped

  def index(self, *args):
    """Positions within this range of the given absolute indices (or Range1D)."""
    if len(args) == 0:
      return []
    if isinstance(args[0], Range1D):
      # BUGFIX: the size argument is optional when indexing by a Range1D
      return self.index_range_of(args[0], args[1] if len(args) > 1 else 0)
    base = list(self.iter())
    if len(args) == 1:
      if type(args[0]) is int:
        return base.index(args[0])
      arr = args[0]
    else:
      arr = args
    return [base.index(index) for index in arr]

  def index_range_of(self, r, size=0):
    """Positions (within self) of those indices of r that self covers."""
    if r.isnone or self.isnone:
      return Range1D.none()
    if self.is_identity_range:
      end = self._elems[0].end
      result = [d for d in r if 0 <= d < end]
    else:
      arr = list(self.iter())
      # BUGFIX: look up the elements of r (the old code iterated arr itself,
      # which trivially produced 0..len(arr)-1).
      result = [arr.index(d) for d in r if d in arr]
    return Range1D.from_list(result)

  def filter(self, data, size, transform=lambda x: x):
    """Select (and transform) the data entries addressed by this range."""
    if self.isall:
      return [transform(x) for x in data]
    return [transform(data[i]) for i in self.iter(size)]

  def iter(self, size=0):
    """Iterate all covered absolute indices, resolving bounds against size."""
    if self._islist:
      return (d.start for d in self._elems)
    else:
      return itertools.chain(*[d.iter(size) for d in self._elems])

  def __iter__(self):
    return self.iter()

  def tolist(self, size=0):
    return list(self.iter(size))

  def contains(self, value, size=0):
    return any(elem.contains(value, size) for elem in self._elems)

  def sort(self, cmp):
    """Return a new range with the indices sorted by the given cmp function."""
    arr = list(self.iter())
    arr.sort(key=cmp_to_key(cmp))
    return Range1D.from_list(arr)

  def remove_duplicates(self, size=0):
    """Return a new, sorted range with duplicate indices removed."""
    arr = list(self.iter())
    arr.sort()
    # BUGFIX: always keep the first element; the old `arr[i - 1]` wrapped
    # around to the last element for i == 0 and could drop it.
    arr = [di for i, di in enumerate(arr) if i == 0 or di != arr[i - 1]]
    return Range1D.from_list(arr)

  def reverse(self):
    """Return a range covering the same indices in reverse order."""
    a = [r.reverse() for r in self._elems]
    # BUGFIX: list.reverse() mutates in place and returns None; the old code
    # assigned that None and always produced an empty range.
    a.reverse()
    return Range1D(a)

  def __str__(self):
    if self.isall:
      return ''
    if len(self) == 1:
      return str(self._elems[0])
    return '(' + ','.join(str(e) for e in self._elems) + ')'
class Range1DGroup(Range1D):
  """A named, colored Range1D: one group of a grouped/composite range."""

  def __init__(self, name, color, base=None):
    super(Range1DGroup, self).__init__(base)
    self.name = name
    self.color = color

  def pre_multiply(self, sub, size=0):
    """Compose the underlying range with sub, keeping name and color."""
    r = super(Range1DGroup, self).pre_multiply(sub, size)
    return Range1DGroup(self.name, self.color, r)

  def union(self, other, size=0):
    r = super(Range1DGroup, self).union(other, size)
    return Range1DGroup(self.name, self.color, r)

  def intersect(self, other, size=0):
    r = super(Range1DGroup, self).intersect(other, size)
    return Range1DGroup(self.name, self.color, r)

  def without(self, without, size=0):
    r = super(Range1DGroup, self).without(without, size)
    return Range1DGroup(self.name, self.color, r)

  def sort(self, cmp):
    r = super(Range1DGroup, self).sort(cmp)
    return Range1DGroup(self.name, self.color, r)

  def __str__(self):
    # Serialize as "name""color"<indices>, matching parse_named_range1d.
    # BUGFIX: str(super(...)) stringified the super proxy object itself
    # ("<super: ...>"); the parent __str__ must be called explicitly.
    return '"' + self.name + '""' + self.color + '"' + super(Range1DGroup, self).__str__()
def as_ungrouped(range):
  """Wrap a plain range in a default (nameless, gray) group."""
  return Range1DGroup('unnamed', 'gray', range)
def composite(name, groups):
  """Build a named composite range out of the given list of groups."""
  return CompositeRange1D(name, groups)
def to_base(groups):
  """Union of all group indices, keeping first-seen order across the groups."""
  if len(groups) == 1:
    return groups[0]
  merged = groups[0].tolist()
  for group in groups[1:]:
    # append only indices not seen so far (checked against the growing list)
    for index in group:
      if index not in merged:
        merged.append(index)
  return Range1D.from_list(merged)
class CompositeRange1D(Range1D):
  """A named range assembled from several groups, acting as their union."""

  def __init__(self, name, groups, base=None):
    # without an explicit base, derive it as the union of all groups
    super(CompositeRange1D, self).__init__(to_base(groups) if base is None else base)
    self.name = name
    self.groups = groups

  def pre_multiply(self, sub, size=0):
    """Compose every group with sub; recompute the base only for multiple groups."""
    new_groups = [g.pre_multiply(sub, size) for g in self.groups]
    base = super(CompositeRange1D, self).pre_multiply(sub, size) if len(self.groups) > 1 else None
    return CompositeRange1D(self.name, new_groups, base)

  def union(self, other, size=0):
    new_groups = [g.union(other, size) for g in self.groups]
    base = super(CompositeRange1D, self).union(other, size) if len(self.groups) > 1 else None
    return CompositeRange1D(self.name, new_groups, base)

  def intersect(self, other, size=0):
    new_groups = [g.intersect(other, size) for g in self.groups]
    base = super(CompositeRange1D, self).intersect(other, size) if len(self.groups) > 1 else None
    return CompositeRange1D(self.name, new_groups, base)

  def without(self, without, size=0):
    new_groups = [g.without(without, size) for g in self.groups]
    base = super(CompositeRange1D, self).without(without, size) if len(self.groups) > 1 else None
    return CompositeRange1D(self.name, new_groups, base)

  def sort(self, cmp):
    new_groups = [g.sort(cmp) for g in self.groups]
    base = super(CompositeRange1D, self).sort(cmp) if len(self.groups) > 1 else None
    return CompositeRange1D(self.name, new_groups, base)

  def __str__(self):
    # Serialize as "name":group1,group2,... matching the range grammar.
    parts = ','.join(str(g) for g in self.groups)
    return '"' + self.name + '":' + parts
class Range(object):
  """A multi-dimensional range: one Range1D per dimension."""

  def __init__(self, dims=None):
    """Create a range over the given Range1D dimensions (default: none yet).

    BUGFIX: the old mutable default (dims=[]) was shared between every
    Range() instance and then mutated through __getitem__, silently
    corrupting all ranges created without explicit dims.
    """
    self.dims = [] if dims is None else dims

  @property
  def isall(self):
    return all_f((d.isall for d in self.dims))

  @property
  def isnone(self):
    return all_f((d.isnone for d in self.dims))

  @property
  def ndim(self):
    return len(self.dims)

  def __getitem__(self, item):
    """Dimension accessor; grows the dims list with "all" ranges on demand."""
    if len(self.dims) > item:
      return self.dims[item]
    for i in number_range(len(self.dims), item + 1):
      self.dims.append(Range1D.all())
    return self.dims[item]

  def __setitem__(self, key, value):
    self.dims[key] = value

  def __len__(self):
    return len(self.dims)

  def __eq__(self, other):
    # BUGFIX: the isnone shortcut used `or`, declaring a none range equal
    # to anything; both sides must be none (mirroring the isall check).
    if self is other or (self.isall and other.isall) or (self.isnone and other.isnone):
      return True
    return str(self) == str(other)

  def pre_multiply(self, other, size=[]):
    """Compose dimension-wise: result[i] = self[i] looked up through other[i]."""
    if self.isall:
      return other.copy()
    if other.isall:
      return self.copy()
    # BUGFIX: the per-dimension size guard was inverted (i >= len(size)
    # indexed past the end of size); same fix in union/intersect/without.
    return Range([d.pre_multiply(other[i], size[i] if i < len(size) else 0) for i, d in enumerate(self.dims)])

  def union(self, other, size=[]):
    if self.isall or other.isnone:
      return self.copy()
    if other.isall or self.isnone:
      return other.copy()
    return Range([d.union(other[i], size[i] if i < len(size) else 0) for i, d in enumerate(self.dims)])

  def intersect(self, other, size=[]):
    if self.isnone or other.isnone:
      return none()
    if self.isall:
      return other.copy()
    if other.isall:
      return self.copy()
    return Range([d.intersect(other[i], size[i] if i < len(size) else 0) for i, d in enumerate(self.dims)])

  def without(self, without, size=[]):
    if self.isnone or without.isnone:
      return self.copy()
    if without.isall:
      return none()
    return Range([d.without(without[i], size[i] if i < len(size) else 0) for i, d in enumerate(self.dims)])

  def copy(self):
    return Range([d.copy() for d in self.dims])

  def asslice(self, no_ellipsis=False):
    """Convert to a tuple of per-dimension slices (or Ellipsis for "all")."""
    # NOTE(review): unlike Range1D.asslice, no_ellipsis is ignored in the
    # isall branch here -- confirm whether that is intentional.
    if self.isall:
      return Ellipsis
    return tuple((d.asslice(no_ellipsis) for d in self.dims))

  def swap(self):
    """Range with the dimension order reversed."""
    a = [d.copy() for d in self.dims]
    a.reverse()
    return Range(a)

  def filter(self, data, size=[]):
    """Filter nested lists according to each dimension's range."""
    if self.isall:
      return data
    ndim = self.ndim

    # recursive variant for just filtering the needed rows
    def filter_dim(i):
      if i >= ndim:
        return lambda x: x
      d = self[i]
      nex = filter_dim(i + 1)
      s = size[i] if len(size) > i else 0
      return lambda elem: d.filter(elem, s, nex) if isinstance(elem, list) else elem

    f = filter_dim(0)
    return f(data)

  def invert(self, indices, size=[]):
    """Map per-dimension positions back to absolute indices."""
    if self.isall:
      return indices
    # BUGFIX: there is no self.dim(); fetch each dimension via indexing.
    return [self[i].invert(index, size[i] if len(size) > i else 0) for i, index in enumerate(indices)]

  def index_of_range(self, r, size=[]):
    """Positions (within self, per dimension) of the indices of r."""
    if r.isnone or self.isnone:
      return none()
    # NOTE(review): `self.isnone` below is unreachable (handled above);
    # this condition likely meant `self.isall` -- confirm before changing.
    if self.isnone or r.isall:
      return self.copy()
    return Range([d.index_range_of(r[i], size[i] if len(size) > i else 0) for i, d in enumerate(self.dims)])

  def index(self, index_or_range, *args):
    """Positions of the given Range or absolute indices within this range."""
    if type(index_or_range) is Range:
      # BUGFIX: the size argument is optional when indexing by a Range
      return self.index_of_range(index_or_range, args[0] if args else [])
    if len(args) == 0:
      if type(index_or_range) is int:
        return self[0].index(index_or_range)
      arr = index_or_range
    else:
      arr = [index_or_range]
      arr.extend(args)
    if len(arr) == 0:
      return []
    return [self[i].index(index) for i, index in enumerate(arr)]

  def size(self, size=[]):
    """Per-dimension sizes of this range, given the underlying data sizes."""
    if self.isall:
      return size
    return [r.size(size[i] if len(size) > i else 0) for i, r in enumerate(self.dims)]

  def split(self):
    """Split into one single-dimensional Range per dimension."""
    return [Range([dim]) for dim in self.dims]

  def __str__(self):
    return ','.join((str(d) for d in self.dims))

  def __iter__(self):
    return iter(self.dims)
def all():
  """A range selecting everything (note: shadows the builtin all; see all_f)."""
  return Range()
def none():
  """An empty two-dimensional range selecting nothing."""
  return Range([Range1D.none(), Range1D.none()])
def from_slice(start, end=-1, step=1):
  """Create a one-dimensional range from slice-like bounds."""
  result = Range()
  result[0].set_slice(start, end, step)
  return result
def range(*args):
  """Create a range from slice bounds: range(), range(start[, end[, step]])
  or range([s, e, st], [s, e, st], ...) for multiple dimensions.

  Note: shadows the builtin range within this module (aliased as number_range).
  """
  if len(args) == 0:
    return all()
  r = Range()
  if isinstance(args[0], list):
    for i, arr in enumerate(args):
      if len(arr) == 0:
        continue
      # BUGFIX: tolerate partial [start] / [start, end] triples instead of
      # raising IndexError; missing parts use set_slice's defaults.
      r[i].set_slice(*arr[:3])
  if type(args[0]) is int:
    # BUGFIX: likewise allow 1-3 scalar arguments for a single slice.
    r[0].set_slice(*args[:3])
  return r
def join(*args):
  """Combine several (multi-dimensional) ranges by taking each one's first dim."""
  if len(args) == 0:
    return all()
  # accept either a single list of ranges or the ranges as varargs
  ranges = args[0] if isinstance(args[0], list) else args
  result = Range()
  result.dims = [ri[0] for ri in ranges]
  return result
def from_list(*args):
  """Create a range from explicit index lists or prebuilt Range1D objects."""
  if len(args) == 0:
    return all()
  r = Range()
  # BUGFIX: guard the args[0][0] peek so from_list([]) no longer raises IndexError
  if isinstance(args[0], list) and len(args[0]) > 0 and type(args[0][0]) is Range1D:
    r.dims = args[0]
  elif isinstance(args[0], list):  # array mode
    for i, arr in enumerate(args):
      if type(arr) is Range1D:
        # BUGFIX: __setitem__ required a pre-existing slot; dims has exactly
        # i entries at this point, so appending places arr at index i.
        r.dims.append(arr)
      else:
        r[i].set_list(arr)
  elif type(args[0]) is int:  # single slice mode
    r[0].set_list(args)
  elif type(args[0]) is Range1D:
    r.dims = args
  return r
# Range EBNF grammar
# R = Dim : ',' Dim
# Dim = '' | SR | '(' SR : ',' SR ' ')'
# SR = N [ ':' N [ ':' N ] ]
# N = '0'...'9'
# Str = '"' literal '"'
# Name= Str
# Col = Str
# GDim= Name Col Dim
# CDim= Name ':' GDim : ',' GDim ''
def parse_range(code):
  """Parse a full multi-dimensional range string into a Range.

  Dimensions are separated by commas; each may be empty (meaning "all"),
  a named group/composite (starting with '"'), or a plain 1D specifier.
  """
  act = 0  # cursor into code
  dims = []
  code = code.strip()
  while act < len(code):
    c = code[act]
    if c == '"':
      # named (grouped or composite) dimension
      act, dim = parse_named_range1d(code, act)
      act += 1  # skip ,
      dims.append(dim)
    elif c == ',':
      # empty specifier before the separator -> select everything
      act += 1
      dims.append(Range1D.all())
    else:
      ract, dim = parse_range1d(code, act)
      act = ract + 1  # skip ,
      dims.append(dim)
  return Range(dims)
def parse_named_range1d(code, act):
  """Parse a named dimension starting at the opening quote at index act.

  Returns (next_index, dim) where dim is a Range1DGroup ("name""color"range)
  or a CompositeRange1D ("name"{..nested named groups..}).
  """
  act += 1  # skip "
  end = code.index('"', act)
  name = code[act:end]
  act = end + 1
  c = code[act]
  if c == '"':
    # "name""color"<range> -> a single colored group
    end = code.index('"', act + 1)
    ract, dim = parse_range1d(code, end + 1)
    return ract, Range1DGroup(name, code[act + 1:end], dim)
  elif c == '{':
    # "name"{"g1"...,"g2"...} -> a composite of nested groups
    groups = []
    while code[act] != '}':
      ract, dim = parse_named_range1d(code, act + 1)
      groups.append(dim)
      act = ract
    return act + 1, CompositeRange1D(name, groups)
  else:  # error
    return act, Range1D.all()
def parse_range1d(code, act):
  """Parse one unnamed 1D specifier beginning at index act.

  Returns (next_index, Range1D); parsing stops at ',' or '}' (or at the
  end of the string).
  """
  if act >= len(code):
    return act, Range1D.all()
  c = code[act]
  if c == ',' or c == '{':
    # empty specifier -> everything
    n = act
    r = Range1D.all()
  elif c == '(':
    # explicit element list: (a:b:c,d,...)
    n = code.index(')', act)
    r = Range1D([RangeElem.parse(ni) for ni in code[act + 1: n].split(',')])
    n += 1
  else:
    # a single element running until the next ',' or '}' (whichever is first)
    n = code.find(',', act)
    n2 = code.find('}', act)
    if n >= 0 and n2 >= 0:
      n = min(n, n2)
    elif n < 0:
      n = n2
    if n < 0:
      n = len(code)
    r = Range1D([RangeElem.parse(code[act:n])])
  return n, r
def parse(*args):
  """Parse one or more range description strings (pass-through for a Range)."""
  if not args:
    return all()
  first = args[0]
  if len(args) == 1 and type(first) is Range:
    return first
  return parse_range(','.join(args))
| 25.169126 | 112 | 0.607378 |
0a905290948958fdcaaf9f29b6e404a3f295b22f | 60,328 | py | Python | wwr_ui/randomizer_window.py | TeknoAnonymous/wwrando-multiworld | 11ad6d10996b74b0d8ed661e6fa31d80b39aba68 | [
"MIT"
] | null | null | null | wwr_ui/randomizer_window.py | TeknoAnonymous/wwrando-multiworld | 11ad6d10996b74b0d8ed661e6fa31d80b39aba68 | [
"MIT"
] | null | null | null | wwr_ui/randomizer_window.py | TeknoAnonymous/wwrando-multiworld | 11ad6d10996b74b0d8ed661e6fa31d80b39aba68 | [
"MIT"
] | null | null | null | from PySide6.QtGui import *
from PySide6.QtCore import *
from PySide6.QtWidgets import *
from wwr_ui.ui_randomizer_window import Ui_MainWindow
from wwr_ui.options import OPTIONS, NON_PERMALINK_OPTIONS, HIDDEN_OPTIONS, POTENTIALLY_UNBEATABLE_OPTIONS
from wwr_ui.update_checker import check_for_updates, LATEST_RELEASE_DOWNLOAD_PAGE_URL
from wwr_ui.inventory import INVENTORY_ITEMS, REGULAR_ITEMS, PROGRESSIVE_ITEMS, DEFAULT_STARTING_ITEMS, DEFAULT_RANDOMIZED_ITEMS
from wwr_ui.packedbits import PackedBitsReader, PackedBitsWriter
import random
import collections
from collections import OrderedDict
import os
import yaml
import traceback
import string
import struct
import base64
import colorsys
import time
import zipfile
import shutil
from randomizer import Randomizer, VERSION, TooFewProgressionLocationsError, InvalidCleanISOError
from wwrando_paths import SETTINGS_PATH, ASSETS_PATH, SEEDGEN_PATH, IS_RUNNING_FROM_SOURCE, CUSTOM_MODELS_PATH
import customizer
from logic.logic import Logic
from wwlib import texture_utils
class WWRandomizerWindow(QMainWindow):
VALID_SEED_CHARACTERS = "-_'%%.%s%s" % (string.ascii_letters, string.digits)
MAX_SEED_LENGTH = 42 # Limited by maximum length of game name in banner
def __init__(self, cmd_line_args=OrderedDict()):
super(WWRandomizerWindow, self).__init__()
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.randomizer_thread = None
self.cmd_line_args = cmd_line_args
self.bulk_test = ("-bulk" in cmd_line_args)
self.no_ui_test = ("-noui" in cmd_line_args)
self.profiling = ("-profile" in cmd_line_args)
self.auto_seed = ("-autoseed" in cmd_line_args)
self.custom_color_selector_buttons = OrderedDict()
self.custom_color_selector_hex_inputs = OrderedDict()
self.custom_color_reset_buttons = OrderedDict()
self.custom_colors = OrderedDict()
self.initialize_custom_player_model_list()
self.initialize_color_presets_list()
self.ui.add_gear.clicked.connect(self.add_to_starting_gear)
self.randomized_gear_model = QStringListModel()
self.randomized_gear_model.setStringList(DEFAULT_RANDOMIZED_ITEMS.copy())
self.filtered_rgear = ModelFilterOut()
self.filtered_rgear.setSourceModel(self.randomized_gear_model)
self.ui.randomized_gear.setModel(self.filtered_rgear)
self.ui.remove_gear.clicked.connect(self.remove_from_starting_gear)
self.starting_gear_model = QStringListModel()
self.starting_gear_model.setStringList(DEFAULT_STARTING_ITEMS.copy())
self.ui.starting_gear.setModel(self.starting_gear_model)
self.preserve_default_settings()
self.cached_item_locations = Logic.load_and_parse_item_locations()
self.ui.starting_pohs.valueChanged.connect(self.update_health_label)
self.ui.starting_hcs.valueChanged.connect(self.update_health_label)
self.load_settings()
self.ui.clean_iso_path.editingFinished.connect(self.update_settings)
self.ui.output_folder.editingFinished.connect(self.update_settings)
self.ui.seed.editingFinished.connect(self.update_settings)
self.ui.clean_iso_path_browse_button.clicked.connect(self.browse_for_clean_iso)
self.ui.output_folder_browse_button.clicked.connect(self.browse_for_output_folder)
self.ui.permalink.textEdited.connect(self.permalink_modified)
self.ui.install_custom_model.clicked.connect(self.install_custom_model_zip)
self.ui.custom_player_model.currentIndexChanged.connect(self.custom_model_changed)
self.ui.player_in_casual_clothes.clicked.connect(self.in_casual_clothes_changed)
self.ui.randomize_all_custom_colors_together.clicked.connect(self.randomize_all_custom_colors_together)
self.ui.randomize_all_custom_colors_separately.clicked.connect(self.randomize_all_custom_colors_separately)
self.ui.custom_color_preset.currentIndexChanged.connect(self.color_preset_changed)
for option_name in OPTIONS:
widget = getattr(self.ui, option_name)
if isinstance(widget, QAbstractButton):
widget.clicked.connect(self.update_settings)
elif isinstance(widget, QComboBox):
widget.currentIndexChanged.connect(self.update_settings)
elif isinstance(widget, QListView):
pass
elif isinstance(widget, QSpinBox):
widget.valueChanged.connect(self.update_settings)
else:
raise Exception("Option widget is invalid: %s" % option_name)
self.ui.generate_seed_button.clicked.connect(self.generate_seed)
self.ui.randomize_button.clicked.connect(self.randomize)
self.ui.reset_settings_to_default.clicked.connect(self.reset_settings_to_default)
self.ui.about_button.clicked.connect(self.open_about)
for option_name in OPTIONS:
getattr(self.ui, option_name).installEventFilter(self)
label_for_option = getattr(self.ui, "label_for_" + option_name, None)
if label_for_option:
label_for_option.installEventFilter(self)
self.ui.sword_mode.highlighted.connect(self.update_sword_mode_highlighted_description)
self.set_option_description(None)
self.update_settings()
self.setWindowTitle("Wind Waker Randomizer %s" % VERSION)
icon_path = os.path.join(ASSETS_PATH, "icon.ico")
self.setWindowIcon(QIcon(icon_path))
if self.auto_seed:
self.generate_seed()
if self.no_ui_test:
self.randomize()
return
self.show()
# if not IS_RUNNING_FROM_SOURCE:
# self.update_checker_thread = UpdateCheckerThread()
# self.update_checker_thread.finished_checking_for_updates.connect(self.show_update_check_results)
# self.update_checker_thread.start()
# else:
# self.ui.update_checker_label.setText("(Running from source, skipping release update check.)")
def generate_seed(self):
random.seed(None)
with open(os.path.join(SEEDGEN_PATH, "adjectives.txt")) as f:
adjectives = random.sample(f.read().splitlines(), 2)
noun_file_to_use = random.choice(["nouns.txt", "names.txt"])
with open(os.path.join(SEEDGEN_PATH, noun_file_to_use)) as f:
noun = random.choice(f.read().splitlines())
words = adjectives + [noun]
capitalized_words = []
for word in words:
capitalized_word = ""
seen_first_letter = False
for char in word:
if char in string.ascii_letters and not seen_first_letter:
capitalized_word += char.capitalize()
seen_first_letter = True
else:
capitalized_word += char
capitalized_words.append(capitalized_word)
seed = "".join(capitalized_words)
seed = self.sanitize_seed(seed)
self.settings["seed"] = seed
self.ui.seed.setText(seed)
self.update_settings()
def sanitize_seed(self, seed):
seed = str(seed)
seed = seed.strip()
seed = "".join(char for char in seed if char in self.VALID_SEED_CHARACTERS)
seed = seed[:self.MAX_SEED_LENGTH]
return seed
def append_row(self, model, value):
model.insertRow(model.rowCount())
newrow = model.index(model.rowCount() - 1, 0)
model.setData(newrow, value)
def move_selected_rows(self, source, dest):
selection = source.selectionModel().selectedIndexes()
# Remove starting from the last so the previous indices remain valid
selection.sort(reverse = True, key = lambda x: x.row())
for item in selection:
value = item.data()
source.model().removeRow(item.row())
self.append_row(dest.model(), value)
  def add_to_starting_gear(self):
    """Move the selected items from the randomized pool into the starting gear."""
    self.move_selected_rows(self.ui.randomized_gear, self.ui.starting_gear)
    self.ui.starting_gear.model().sort(0)
    self.update_settings()
  def remove_from_starting_gear(self):
    """Move the selected starting-gear items back into the randomized pool."""
    self.move_selected_rows(self.ui.starting_gear, self.ui.randomized_gear)
    # sort the underlying source model (the view shows a filtering proxy)
    self.ui.randomized_gear.model().sourceModel().sort(0)
    self.update_settings()
def update_health_label(self):
pohs = self.ui.starting_pohs.value()
hcs = self.ui.starting_hcs.value() * 4
health = hcs + pohs + 12
pieces = health % 4
text = "Current Starting Health: %d hearts" % (health // 4) # full hearts
if pieces != 0:
if pieces == 1: # grammar check
text += " and 1 piece"
else:
text += " and %d pieces" % pieces
self.ui.current_health.setText(text)
  def randomize(self):
    """Validate the inputs, build the Randomizer, and run it on a worker thread."""
    # Normalize the two path inputs and write them back to settings and UI.
    clean_iso_path = self.settings["clean_iso_path"].strip()
    output_folder = self.settings["output_folder"].strip()
    self.settings["clean_iso_path"] = clean_iso_path
    self.settings["output_folder"] = output_folder
    self.ui.clean_iso_path.setText(clean_iso_path)
    self.ui.output_folder.setText(output_folder)
    if not os.path.isfile(clean_iso_path):
      QMessageBox.warning(self, "Clean ISO path not specified", "Must specify path to your clean Wind Waker ISO (USA).")
      return
    if not os.path.isdir(output_folder):
      QMessageBox.warning(self, "No output folder specified", "Must specify a valid output folder for the randomized files.")
      return
    seed = self.settings["seed"]
    seed = self.sanitize_seed(seed)
    # An empty seed gets replaced by a freshly generated one.
    if not seed:
      self.generate_seed()
      seed = self.settings["seed"]
    self.settings["seed"] = seed
    self.ui.seed.setText(seed)
    self.update_settings()
    # Snapshot all option values plus the custom colors for the randomizer.
    options = OrderedDict()
    for option_name in OPTIONS:
      options[option_name] = self.get_option_value(option_name)
    colors = OrderedDict()
    for color_name in self.get_default_custom_colors_for_current_model():
      colors[color_name] = self.get_color(color_name)
    options["custom_colors"] = colors
    permalink = self.ui.permalink.text()
    # Enemy palette randomization adds extra progress steps to the dialog.
    max_progress_val = 20
    if options.get("randomize_enemy_palettes"):
      max_progress_val += 10
    self.progress_dialog = RandomizerProgressDialog("Randomizing", "Initializing...", max_progress_val)
    # -bulk test mode: synchronously smoke-test 100 numeric seeds and report
    # failures. NOTE(review): the normal single-seed run still executes below.
    if self.bulk_test:
      failures_done = 0
      total_done = 0
      for i in range(100):
        temp_seed = str(i)
        try:
          rando = Randomizer(temp_seed, clean_iso_path, output_folder, options, permalink=permalink, cmd_line_args=self.cmd_line_args)
          randomizer_generator = rando.randomize()
          while True:
            next_option_description, options_finished = next(randomizer_generator)
            if options_finished == -1:
              break
        except Exception as e:
          stack_trace = traceback.format_exc()
          error_message = "Error on seed " + temp_seed + ":\n" + str(e) + "\n\n" + stack_trace
          print(error_message)
          failures_done += 1
        total_done += 1
        print("%d/%d seeds failed" % (failures_done, total_done))
    # Constructing the Randomizer can already fail on known bad inputs.
    try:
      rando = Randomizer(seed, clean_iso_path, output_folder, options, permalink=permalink, cmd_line_args=self.cmd_line_args)
    except (TooFewProgressionLocationsError, InvalidCleanISOError) as e:
      error_message = str(e)
      self.randomization_failed(error_message)
      return
    except Exception as e:
      stack_trace = traceback.format_exc()
      error_message = "Randomization failed with error:\n" + str(e) + "\n\n" + stack_trace
      self.randomization_failed(error_message)
      return
    # Run the actual randomization on a worker thread so the UI stays live.
    self.randomizer_thread = RandomizerThread(rando, profiling=self.profiling)
    self.randomizer_thread.update_progress.connect(self.update_progress_dialog)
    self.randomizer_thread.randomization_complete.connect(self.randomization_complete)
    self.randomizer_thread.randomization_failed.connect(self.randomization_failed)
    self.randomizer_thread.start()
def update_progress_dialog(self, next_option_description, options_finished):
    """Slot for the randomizer thread's progress signal: show the current step and value."""
    dialog = self.progress_dialog
    dialog.setLabelText(next_option_description)
    dialog.setValue(options_finished)
def randomization_complete(self):
    """Slot run when the randomizer thread finishes successfully; show a completion dialog."""
    self.progress_dialog.reset()
    self.randomizer_thread = None
    if self.no_ui_test:
        # Automated UI test mode: just close the window instead of showing a dialog.
        self.close()
        return
    text = """Randomization complete.<br><br>
If you get stuck, check the progression spoiler log in the output folder."""
    dialog = QMessageBox()
    dialog.setTextFormat(Qt.TextFormat.RichText)
    dialog.setWindowTitle("Randomization complete")
    dialog.setText(text)
    dialog.setWindowIcon(self.windowIcon())
    # Keep a reference on self so the non-modal dialog isn't garbage collected while shown.
    self.complete_dialog = dialog
    dialog.show()
def randomization_failed(self, error_message):
    """Slot run when randomization fails; log the error and show it to the user.

    Called both for failures inside the randomizer thread and for setup failures
    before the thread starts (in which case self.randomizer_thread is None).
    """
    self.progress_dialog.reset()
    if self.randomizer_thread is not None:
        try:
            # Persist the error to the randomizer's error log for bug reports.
            self.randomizer_thread.randomizer.write_error_log(error_message)
        except Exception as e:
            # If an error happened when writing the error log just print it and then ignore it.
            stack_trace = traceback.format_exc()
            other_error_message = "Failed to write error log:\n" + str(e) + "\n\n" + stack_trace
            print(other_error_message)
    self.randomizer_thread = None
    print(error_message)
    QMessageBox.critical(
        self, "Randomization Failed",
        error_message
    )
def show_update_check_results(self, new_version):
    """Display the outcome of the update check in the update-checker label.

    new_version is falsy when up to date, "error" on failure, or a version string.
    """
    label = self.ui.update_checker_label
    if not new_version:
        label.setText("No new updates to the randomizer are available.")
        return
    if new_version == "error":
        label.setText("There was an error checking for updates.")
        return
    new_text = "<b>Version %s of the randomizer is available!</b>" % new_version
    new_text += " <a href=\"%s\">Click here</a> to go to the download page." % LATEST_RELEASE_DOWNLOAD_PAGE_URL
    label.setText(new_text)
def preserve_default_settings(self):
    """Snapshot every option's current value as the defaults used by reset/fallback logic."""
    self.default_settings = OrderedDict(
        (option_name, self.get_option_value(option_name))
        for option_name in OPTIONS
    )
def reset_settings_to_default(self):
    """Restore every option and custom color to the preserved defaults.

    Shows an informational dialog if nothing needed to change.
    """
    any_setting_changed = False
    for option_name in OPTIONS:
        if option_name not in self.default_settings:
            continue
        default_value = self.default_settings[option_name]
        if self.get_option_value(option_name) != default_value:
            any_setting_changed = True
            self.set_option_value(option_name, default_value)
    if self.reset_color_selectors_to_model_default_colors():
        any_setting_changed = True
    self.update_settings()
    if not any_setting_changed:
        QMessageBox.information(self,
            "Settings already default",
            "You already have all the default randomization settings."
        )
def load_settings(self):
    """Load saved settings from the settings file and apply them to the GUI.

    Order matters: plain options are applied first, then the custom model is
    reloaded (rebuilding the color widgets), then custom colors, and finally
    the color preset, which can only be resolved once the model's preset list
    has been populated.
    """
    if os.path.isfile(SETTINGS_PATH):
        with open(SETTINGS_PATH) as f:
            self.settings = yaml.safe_load(f)
        # An empty settings file parses as None; normalize to an empty dict.
        if self.settings is None:
            self.settings = OrderedDict()
    else:
        self.settings = OrderedDict()
    if "clean_iso_path" in self.settings:
        self.ui.clean_iso_path.setText(self.settings["clean_iso_path"])
    if "output_folder" in self.settings:
        self.ui.output_folder.setText(self.settings["output_folder"])
    if "seed" in self.settings:
        self.ui.seed.setText(self.settings["seed"])
    for option_name in OPTIONS:
        if option_name in self.settings:
            if option_name == "custom_color_preset":
                # Color presets not loaded yet, handle this later
                continue
            self.set_option_value(option_name, self.settings[option_name])
    self.reload_custom_model(update_preview=False)
    if "custom_colors" in self.settings:
        custom_colors_from_settings = self.settings["custom_colors"]
        # Only read colors into the self.custom_colors dict if they are valid colors for this model.
        for color_name, default_color in self.get_default_custom_colors_for_current_model().items():
            if color_name in custom_colors_from_settings:
                self.custom_colors[color_name] = custom_colors_from_settings[color_name]
            else:
                self.custom_colors[color_name] = default_color
        # Update the GUI buttons to match the custom colors (or the preset colors, if a preset is selected).
        for color_name in self.get_default_custom_colors_for_current_model():
            color = self.get_color(color_name)
            option_name = "custom_color_" + color_name
            self.set_color(option_name, color, update_preview=False, save_color_as_custom=False)
    if "custom_color_preset" in self.settings:
        self.set_option_value("custom_color_preset", self.settings["custom_color_preset"])
        self.reload_colors()
    self.update_model_preview()
def save_settings(self):
    """Write the current settings dict out to the settings file as YAML."""
    with open(SETTINGS_PATH, "w") as settings_file:
        yaml.dump(self.settings, settings_file, default_flow_style=False, Dumper=yaml.Dumper)
def update_settings(self):
    """Sync every GUI control into self.settings, persist them, and refresh derived UI."""
    # The three free-text fields share their settings keys with their widget names.
    for field_name in ("clean_iso_path", "output_folder", "seed"):
        self.settings[field_name] = getattr(self.ui, field_name).text()
    # Resolve any mutually-exclusive options before reading them all out.
    self.ensure_valid_combination_of_options()
    self.disable_invalid_cosmetic_options()
    for option_name in OPTIONS:
        self.settings[option_name] = self.get_option_value(option_name)
    self.settings["custom_colors"] = self.custom_colors
    self.save_settings()
    self.encode_permalink()
    self.update_total_progress_locations()
def update_total_progress_locations(self):
    """Recompute how many progression locations the current options allow and show it in the group box title."""
    current_options = OrderedDict(
        (name, self.get_option_value(name)) for name in OPTIONS
    )
    num_progress_locations = Logic.get_num_progression_locations_static(self.cached_item_locations, current_options)
    title = "Where Should Progress Items Appear? (Selected: %d Possible Progression Locations)" % num_progress_locations
    self.ui.groupBox.setTitle(title)
def permalink_modified(self):
    """Slot run when the permalink text box is edited; try to apply the pasted permalink."""
    permalink = self.ui.permalink.text()
    try:
        self.decode_permalink(permalink)
    except Exception as e:
        # A malformed permalink must not crash the GUI; log it and tell the user.
        stack_trace = traceback.format_exc()
        print("Failed to parse permalink:\n" + str(e) + "\n\n" + stack_trace)
        QMessageBox.critical(
            self, "Invalid permalink",
            "The permalink you pasted is invalid."
        )
    # Re-encode so the box shows a normalized permalink for the (possibly unchanged) settings.
    self.encode_permalink()
def encode_permalink(self):
    """Serialize the seed plus all permalink-relevant options into the permalink box.

    Permalink layout: ASCII version string, NUL, ASCII seed, NUL, then the
    option values bit-packed in OPTIONS iteration order, all base64-encoded.
    decode_permalink must read fields back in exactly this order.
    """
    seed = self.settings["seed"]
    seed = self.sanitize_seed(seed)
    if not seed:
        # No seed means no permalink can be generated.
        self.ui.permalink.setText("")
        return
    permalink = b""
    permalink += VERSION.encode("ascii")
    permalink += b"\0"
    permalink += seed.encode("ascii")
    permalink += b"\0"
    bitswriter = PackedBitsWriter()
    for option_name in OPTIONS:
        if option_name in NON_PERMALINK_OPTIONS:
            continue
        value = self.settings[option_name]
        if option_name == "randomize_enemy_palettes" and not self.get_option_value("randomize_enemies"):
            # Enemy palette randomizer doesn't need to be in the permalink when enemy rando is off.
            # So just put a 0 bit as a placeholder.
            value = False
        widget = getattr(self.ui, option_name)
        if isinstance(widget, QAbstractButton):
            # Boolean options take a single bit.
            bitswriter.write(int(value), 1)
        elif isinstance(widget, QComboBox):
            # Comboboxes are stored as the selected index in one byte.
            value = widget.currentIndex()
            assert 0 <= value <= 255
            bitswriter.write(value, 8)
        elif isinstance(widget, QSpinBox):
            # Spinboxes use just enough bits to cover their min..max range,
            # storing the value offset by the minimum.
            box_length = (widget.maximum() - widget.minimum()).bit_length()
            value = widget.value() - widget.minimum()
            assert 0 <= value < (2 ** box_length)
            bitswriter.write(value, box_length)
        elif widget == self.ui.starting_gear:
            # randomized_gear is a complement of starting_gear
            for i in range(len(REGULAR_ITEMS)):
                bit = REGULAR_ITEMS[i] in value
                bitswriter.write(bit, 1)
            unique_progressive_items = list(set(PROGRESSIVE_ITEMS))
            unique_progressive_items.sort()
            for item in unique_progressive_items:
                # No Progressive Sword and there's no more than
                # 3 of any other Progressive item so two bits per item
                bitswriter.write(value.count(item), 2)
    bitswriter.flush()
    for byte in bitswriter.bytes:
        permalink += struct.pack(">B", byte)
    base64_encoded_permalink = base64.b64encode(permalink).decode("ascii")
    self.ui.permalink.setText(base64_encoded_permalink)
def decode_permalink(self, base64_encoded_permalink):
    """Parse a permalink string and apply its seed and options to the GUI.

    Inverse of encode_permalink: base64 -> version NUL seed NUL packed option
    bits, read back in OPTIONS iteration order. May raise on malformed input
    (e.g. invalid base64); the caller (permalink_modified) handles that.
    """
    base64_encoded_permalink = base64_encoded_permalink.strip()
    if not base64_encoded_permalink:
        # Empty
        return
    permalink = base64.b64decode(base64_encoded_permalink)
    given_version_num, seed, options_bytes = permalink.split(b"\0", 2)
    given_version_num = given_version_num.decode("ascii")
    seed = seed.decode("ascii")
    if given_version_num != VERSION:
        QMessageBox.critical(
            self, "Invalid permalink",
            "The permalink you pasted is for version %s of the randomizer, it cannot be used with the version you are currently using (%s)." % (given_version_num, VERSION)
        )
        return
    self.ui.seed.setText(seed)
    option_bytes = struct.unpack(">" + "B"*len(options_bytes), options_bytes)
    # Remembered so enemy palette rando can be restored if enemy rando turns out to be off (see end of method).
    prev_randomize_enemy_palettes_value = self.get_option_value("randomize_enemy_palettes")
    bitsreader = PackedBitsReader(option_bytes)
    for option_name in OPTIONS:
        if option_name in NON_PERMALINK_OPTIONS:
            continue
        widget = getattr(self.ui, option_name)
        if isinstance(widget, QAbstractButton):
            boolean_value = bitsreader.read(1)
            self.set_option_value(option_name, boolean_value)
        elif isinstance(widget, QComboBox):
            index = bitsreader.read(8)
            # Clamp out-of-range indexes (e.g. from a corrupt permalink) to the first entry.
            if index >= widget.count() or index < 0:
                index = 0
            value = widget.itemText(index)
            self.set_option_value(option_name, value)
        elif isinstance(widget, QSpinBox):
            box_length = (widget.maximum() - widget.minimum()).bit_length()
            value = bitsreader.read(box_length) + widget.minimum()
            # Out-of-range values fall back to the default for this option.
            if value > widget.maximum() or value < widget.minimum():
                value = self.default_settings[option_name]
            self.set_option_value(option_name, value)
        elif widget == self.ui.starting_gear:
            # Reset model with only the regular items
            self.randomized_gear_model.setStringList(REGULAR_ITEMS.copy())
            self.starting_gear_model.setStringList([])
            self.filtered_rgear.setFilterStrings([])
            for i in range(len(REGULAR_ITEMS)):
                starting = bitsreader.read(1)
                if starting == 1:
                    self.ui.randomized_gear.selectionModel().select(self.randomized_gear_model.index(i), QItemSelectionModel.Select)
            self.move_selected_rows(self.ui.randomized_gear, self.ui.starting_gear)
            # Progressive items are all after regular items
            unique_progressive_items = list(set(PROGRESSIVE_ITEMS))
            unique_progressive_items.sort()
            for item in unique_progressive_items:
                amount = bitsreader.read(2)
                randamount = PROGRESSIVE_ITEMS.count(item) - amount
                for i in range(amount):
                    self.append_row(self.starting_gear_model, item)
                for i in range(randamount):
                    self.append_row(self.randomized_gear_model, item)
    if not self.get_option_value("randomize_enemies"):
        # If a permalink with enemy rando off was pasted, we don't want to change enemy palette rando to match the permalink.
        # So revert it to the value from before reading the permalink.
        self.set_option_value("randomize_enemy_palettes", prev_randomize_enemy_palettes_value)
    self.update_settings()
def browse_for_clean_iso(self):
    """Show a file picker for the vanilla ISO, defaulting to the last-used location."""
    default_dir = None
    last_path = self.settings["clean_iso_path"]
    if last_path and os.path.isfile(last_path):
        default_dir = os.path.dirname(last_path)
    clean_iso_path, selected_filter = QFileDialog.getOpenFileName(self, "Select clean Wind Waker ISO", default_dir, "GC ISO Files (*.iso *.gcm)")
    if clean_iso_path:
        self.ui.clean_iso_path.setText(clean_iso_path)
        self.update_settings()
def browse_for_output_folder(self):
    """Show a directory picker for the output folder, defaulting to the last-used one."""
    default_dir = None
    last_folder = self.settings["output_folder"]
    if last_folder and os.path.isdir(last_folder):
        default_dir = last_folder
    output_folder_path = QFileDialog.getExistingDirectory(self, "Select output folder", default_dir)
    if output_folder_path:
        self.ui.output_folder.setText(output_folder_path)
        self.update_settings()
def eventFilter(self, target, event):
    """Show an option's description while hovering its widget or label; clear it on leave."""
    event_type = event.type()
    if event_type == QEvent.Enter:
        option_name = target.objectName()
        # Labels are named "label_for_<option>"; strip the prefix to find the option.
        prefix = "label_for_"
        if option_name.startswith(prefix):
            option_name = option_name[len(prefix):]
        description = OPTIONS[option_name] if option_name in OPTIONS else None
        self.set_option_description(description)
        return True
    if event_type == QEvent.Leave:
        self.set_option_description(None)
        return True
    return QMainWindow.eventFilter(self, target, event)
def update_sword_mode_highlighted_description(self, index):
    """Show the description for the sword mode currently highlighted in the dropdown."""
    option_name = self.ui.sword_mode.itemText(index)
    descriptions = {
        "Start with Hero's Sword": "Start with Hero's Sword: You will start the game with the basic Hero's Sword already in your inventory (the default).",
        "No Starting Sword": "No Starting Sword: You will start the game with no sword, and have to find it somewhere in the world like other randomized items.",
        "Swordless": "Swordless: You will start the game with no sword, and won't be able to find it anywhere. You have to beat the entire game using other items as weapons instead of the sword.\n(Note that Phantom Ganon in FF becomes vulnerable to Skull Hammer in this mode.)",
    }
    # Unknown entries clear the description (get() returns None).
    self.set_option_description(descriptions.get(option_name))
def get_option_value(self, option_name):
    """Read the current value of one option from its GUI widget.

    Returns a bool for check/radio buttons, a str for comboboxes, an int for
    spinboxes, and a sorted list of item strings for list views.
    """
    widget = getattr(self.ui, option_name)
    if isinstance(widget, (QCheckBox, QRadioButton)):
        return widget.isChecked()
    if isinstance(widget, QComboBox):
        return widget.itemText(widget.currentIndex())
    if isinstance(widget, QSpinBox):
        return widget.value()
    if isinstance(widget, QListView):
        model = widget.model()
        if model == None:
            return []
        # Filtered views report their underlying source model's contents.
        if isinstance(model, ModelFilterOut):
            model = model.sourceModel()
        model.sort(0)
        return [model.data(model.index(row)) for row in range(model.rowCount())]
    print("Option widget is invalid: %s" % option_name)
def set_option_value(self, option_name, new_value):
    """Write new_value into the GUI widget for option_name, validating per widget type.

    Invalid values fall back to index 0 (comboboxes) or to the preserved
    default settings (spinboxes and list views), with a console message.
    """
    widget = getattr(self.ui, option_name)
    if isinstance(widget, QCheckBox) or isinstance(widget, QRadioButton):
        widget.setChecked(bool(new_value))
    elif isinstance(widget, QComboBox):
        # Find the entry whose text matches new_value.
        index_of_value = None
        for i in range(widget.count()):
            text = widget.itemText(i)
            if text == new_value:
                index_of_value = i
                break
        if index_of_value is None:
            print("Cannot find value %s in combobox %s" % (new_value, option_name))
            index_of_value = 0
        widget.setCurrentIndex(index_of_value)
    elif isinstance(widget, QSpinBox):
        if new_value < widget.minimum() or new_value > widget.maximum():
            print("Value %s out of range for spinbox %s" % (new_value, option_name))
            new_value = self.default_settings[option_name] # reset to default in case 0 is not default or in normal range
        widget.setValue(new_value)
    elif isinstance(widget, QListView):
        # List options expect a list of item name strings.
        if not isinstance(new_value, list):
            new_value = self.default_settings[option_name]
        if widget.model() != None:
            model = widget.model()
            # Write through proxy models to their underlying source model.
            if isinstance(model, QSortFilterProxyModel):
                model = model.sourceModel()
            model.setStringList(new_value)
            model.sort(0)
    else:
        print("Option widget is invalid: %s" % option_name)
def set_option_description(self, new_description):
    """Show the given description text, or grey placeholder text when None."""
    description_label = self.ui.option_description
    if new_description is None:
        description_label.setText("(Hover over an option to see a description of what it does.)")
        description_label.setStyleSheet("color: grey;")
        return
    description_label.setText(new_description)
    description_label.setStyleSheet("")
def initialize_custom_player_model_list(self):
    """Populate the player-model dropdown with Link plus any installed custom models."""
    dropdown = self.ui.custom_player_model
    dropdown.addItem("Link")
    custom_model_names = customizer.get_all_custom_model_names()
    for custom_model_name in custom_model_names:
        dropdown.addItem(custom_model_name)
    if not custom_model_names:
        # Only Link is available, so there is nothing to choose between.
        dropdown.setEnabled(False)
    else:
        dropdown.addItem("Random")
        dropdown.addItem("Random (exclude Link)")
def update_custom_player_model_list(self):
    """Rebuild the player-model dropdown from scratch (e.g. after models change on disk)."""
    self.ui.custom_player_model.clear()
    self.initialize_custom_player_model_list()
def initialize_color_presets_list(self):
    """Set up the color-preset dropdown.

    "Default" and "Custom" are always present; model-specific presets are
    appended after them by update_color_presets_list.
    """
    self.ui.custom_color_preset.addItem("Default")
    self.ui.custom_color_preset.addItem("Custom")
    self.update_color_presets_list(reload_colors=False)
def update_color_presets_list(self, reload_colors=True):
    """Rebuild the preset dropdown for the current model, preserving the selection when possible.

    Signals on the dropdown are blocked for the duration so intermediate
    changes don't trigger preview regeneration; pass reload_colors=False when
    the caller will refresh the color buttons itself.
    """
    # Temporarily prevent the preset changing from regenerating the preview image since we'll be changing it several times in this function.
    self.ui.custom_color_preset.blockSignals(True)
    # Keep track of what the value of the presets dropdown was.
    prev_selected_preset_type = self.get_option_value("custom_color_preset")
    # Remove everything except "Default" and "Custom".
    for i in reversed(range(self.ui.custom_color_preset.count())):
        if self.ui.custom_color_preset.itemText(i) in ["Default", "Custom"]:
            continue
        self.ui.custom_color_preset.removeItem(i)
    # Add the presets specific to this model.
    presets = self.get_color_presets_for_current_model()
    for preset_name in presets:
        if preset_name in ["Default", "Custom"]:
            QMessageBox.warning(self, "Invalid color preset name", "The selected player model has a preset named \"%s\", which is a reserved name. This preset will be ignored." % preset_name)
            continue
        self.ui.custom_color_preset.addItem(preset_name)
    # If the new model has a preset with the same name as the selected preset for the previous model, set the dropdown back to that value.
    # This is so switching between hero/casual doesn't reset the preset you have selected, in cases where the same preset is specified for both hero and casual.
    # (This has the side effect of preserving the preset even across entirely different models if they happen to have presets of the same name.)
    if prev_selected_preset_type in presets:
        self.set_option_value("custom_color_preset", prev_selected_preset_type)
    else:
        # Otherwise switch to Default, since the Casual colors get cleared on model switch anyway.
        self.set_option_value("custom_color_preset", "Default")
    if reload_colors:
        # Because we blocked signals, we manually reload the color buttons, without generating the preview.
        self.reload_colors(update_preview=False)
    self.ui.custom_color_preset.blockSignals(False)
def reload_custom_model(self, update_preview=True):
    """Rebuild all per-model cosmetic UI for the currently selected player model.

    Tears down and recreates the custom color selector rows from the model's
    metadata, updates the author comment / casual clothes checkbox text, the
    preset list, and the visibility of the voice/item disable checkboxes.
    """
    self.disable_invalid_cosmetic_options()
    # Tear down the previous model's color selector rows, deleting their widgets.
    while self.ui.custom_colors_layout.count():
        item = self.ui.custom_colors_layout.takeAt(0)
        hlayout = item.layout()
        while hlayout.count():
            item = hlayout.takeAt(0)
            widget = item.widget()
            if widget:
                widget.deleteLater()
    self.custom_color_selector_buttons = OrderedDict()
    self.custom_color_selector_hex_inputs = OrderedDict()
    self.custom_color_reset_buttons = OrderedDict()
    custom_model_name = self.get_option_value("custom_player_model")
    metadata = customizer.get_model_metadata(custom_model_name)
    if metadata is None:
        # No metadata available for this model; leave the cosmetic UI empty.
        return
    if "error_message" in metadata:
        error_message = "Syntax error when trying to read metadata.txt for custom model: %s\n\n%s" %(custom_model_name, metadata["error_message"])
        print(error_message)
        QMessageBox.critical(
            self, "Failed to load model metadata",
            error_message
        )
    model_author = metadata.get("author", None)
    model_comment = metadata.get("comment", None)
    comment_lines = []
    if model_author:
        comment_lines.append("Model author: %s" % model_author)
    if model_comment:
        comment_lines.append("Model author comment: %s" % model_comment)
    self.ui.custom_model_comment.setText("\n".join(comment_lines))
    if len(comment_lines) <= 0:
        self.ui.custom_model_comment.hide()
    else:
        self.ui.custom_model_comment.show()
    # Allow customizing the text of the Casual Clothes checkbox.
    casual_clothes_option_text = str(metadata.get("casual_clothes_option_text", "Casual Clothes"))
    if len(casual_clothes_option_text) > 28:
        # 28 character maximum length.
        casual_clothes_option_text = casual_clothes_option_text[:28]
    self.ui.player_in_casual_clothes.setText(casual_clothes_option_text)
    is_casual = self.get_option_value("player_in_casual_clothes")
    if is_casual:
        prefix = "casual"
    else:
        prefix = "hero"
    self.custom_colors = OrderedDict()
    custom_colors = metadata.get(prefix + "_custom_colors", {})
    # Build one row of widgets per color: label, hex input, random button,
    # color picker button, and (initially hidden) reset button.
    for custom_color_name, default_color in custom_colors.items():
        option_name = "custom_color_" + custom_color_name
        hlayout = QHBoxLayout()
        label_for_color_selector = QLabel(self.ui.tab_2)
        label_for_color_selector.setText("%s Color" % custom_color_name)
        hlayout.addWidget(label_for_color_selector)
        color_hex_code_input = QLineEdit(self.ui.tab_2)
        color_hex_code_input.setText("")
        color_hex_code_input.setObjectName(option_name + "_hex_code_input")
        color_hex_code_input.setFixedWidth(QFontMetrics(QFont()).horizontalAdvance("CCCCCC")+5)
        color_hex_code_input.setAlignment(Qt.AlignHCenter | Qt.AlignVCenter)
        hlayout.addWidget(color_hex_code_input)
        color_randomize_button = QPushButton(self.ui.tab_2)
        color_randomize_button.setText("Random")
        color_randomize_button.setObjectName(option_name + "_randomize_color")
        color_randomize_button.setFixedWidth(QFontMetrics(QFont()).horizontalAdvance("Random")+11)
        hlayout.addWidget(color_randomize_button)
        color_selector_button = QPushButton(self.ui.tab_2)
        color_selector_button.setText("Click to set color")
        color_selector_button.setObjectName(option_name)
        hlayout.addWidget(color_selector_button)
        color_reset_button = QPushButton(self.ui.tab_2)
        color_reset_button.setText("X")
        color_reset_button.setObjectName(option_name + "_reset_color")
        color_reset_button.setFixedWidth(QFontMetrics(QFont()).horizontalAdvance("X")+11)
        # Keep the row's width stable while the reset button is hidden.
        size_policy = color_reset_button.sizePolicy()
        size_policy.setRetainSizeWhenHidden(True)
        color_reset_button.setSizePolicy(size_policy)
        color_reset_button.setVisible(False)
        hlayout.addWidget(color_reset_button)
        self.custom_color_selector_buttons[option_name] = color_selector_button
        color_selector_button.clicked.connect(self.open_custom_color_chooser)
        self.custom_color_selector_hex_inputs[option_name] = color_hex_code_input
        color_hex_code_input.textEdited.connect(self.custom_color_hex_code_changed)
        color_hex_code_input.editingFinished.connect(self.custom_color_hex_code_finished_editing)
        color_randomize_button.clicked.connect(self.randomize_one_custom_color)
        color_reset_button.clicked.connect(self.reset_one_custom_color)
        self.custom_color_reset_buttons[option_name] = color_reset_button
        self.ui.custom_colors_layout.addLayout(hlayout)
        self.set_color(option_name, default_color, update_preview=False, save_color_as_custom=False)
    if len(custom_colors) == 0:
        # Need to push the preview over to the right even when there are no colors to do it, so add a spacer.
        hlayout = QHBoxLayout()
        hspacer = QSpacerItem(20, 40, QSizePolicy.Expanding, QSizePolicy.Minimum)
        hlayout.addItem(hspacer)
        self.ui.custom_colors_layout.addLayout(hlayout)
    self.update_color_presets_list()
    if update_preview:
        self.update_model_preview()
    # Hide the custom voice disable option for models that don't have custom voice files.
    if custom_model_name == "Random" or custom_model_name == "Random (exclude Link)":
        self.ui.disable_custom_player_voice.show()
    else:
        custom_model_path = os.path.join(CUSTOM_MODELS_PATH, custom_model_name)
        jaiinit_aaf_path = os.path.join(custom_model_path, "sound", "JaiInit.aaf")
        voice_aw_path = os.path.join(custom_model_path, "sound", "voice_0.aw")
        if os.path.isfile(jaiinit_aaf_path) and os.path.isfile(voice_aw_path):
            self.ui.disable_custom_player_voice.show()
        else:
            self.ui.disable_custom_player_voice.hide()
    # Hide the custom items disable option for Link, but not any other models (since we don't know which have custom items).
    if custom_model_name == "Link":
        self.ui.disable_custom_player_items.hide()
    else:
        self.ui.disable_custom_player_items.show()
def reload_colors(self, update_preview=True):
    """Refresh every color button to show the currently effective colors."""
    for color_name in self.get_default_custom_colors_for_current_model():
        self.set_color(
            "custom_color_" + color_name,
            self.get_color(color_name),
            update_preview=False,
            save_color_as_custom=False,
        )
    if update_preview:
        self.update_model_preview()
def custom_model_changed(self, index):
    """Slot for the player-model dropdown: rebuild the cosmetic UI for the new model."""
    self.reload_custom_model()
def in_casual_clothes_changed(self, checked):
    """Slot for the casual clothes checkbox: reload the model (hero/casual colors differ)."""
    self.reload_custom_model()
def color_preset_changed(self, index):
    """Slot for the color preset dropdown: refresh the color buttons and preview."""
    self.reload_colors()
def reset_color_selectors_to_model_default_colors(self):
    """Reset every color selector to the model's default colors.

    Returns True if at least one displayed color actually changed.
    """
    default_colors = self.get_default_custom_colors_for_current_model()
    any_color_changed = False
    for custom_color_name, default_color in default_colors.items():
        if custom_color_name in self.custom_colors:
            if self.custom_colors[custom_color_name] != default_color:
                any_color_changed = True
        self.set_color("custom_color_" + custom_color_name, default_color, update_preview=False, save_color_as_custom=False)
    if any_color_changed:
        self.update_model_preview()
    return any_color_changed
def ensure_valid_combination_of_options(self):
    """Enforce cross-option constraints so the current option combination is valid.

    Disables options made irrelevant by others, filters items out of the gear
    lists where the sword mode / race mode requires it, and repairs the gear
    lists if their combined contents no longer match the full inventory.
    """
    items_to_filter_out = []
    should_enable_options = {}
    for option_name in OPTIONS:
        should_enable_options[option_name] = True
    if not self.get_option_value("progression_dungeons"):
        # Race mode places required items on dungeon bosses.
        should_enable_options["race_mode"] = False
    sword_mode = self.get_option_value("sword_mode")
    if sword_mode == "Swordless":
        items_to_filter_out += ["Hurricane Spin"]
    if sword_mode in ["Swordless", "No Starting Sword"]:
        items_to_filter_out += 3 * ["Progressive Sword"]
    if self.get_option_value("race_mode"):
        num_possible_rewards = 8 - int(self.get_option_value("num_starting_triforce_shards"))
        potential_boss_rewards = []
        if sword_mode == "Start with Hero's Sword":
            potential_boss_rewards += 3 * ["Progressive Sword"]
        elif sword_mode == "No Starting Sword":
            num_possible_rewards += 4
            potential_boss_rewards += 3 * ["Progressive Bow"] + ["Hookshot", "Progressive Shield", "Boomerang"]
        # Pull items out of the gear pool until there are enough potential boss
        # rewards for the chosen number of race mode dungeons.
        while num_possible_rewards < int(self.get_option_value("num_race_mode_dungeons")):
            cur_reward = potential_boss_rewards.pop(0)
            items_to_filter_out += [cur_reward]
            num_possible_rewards += 1
    else:
        should_enable_options["num_race_mode_dungeons"] = False
    # Multiworld-only widgets are enabled/disabled directly (not via should_enable_options).
    mw_opts = ["world_count", "world_id", "label_for_world_count", "label_for_world_id"]
    if not self.get_option_value("multiworld") == "Multiworld":
        for i in mw_opts:
            getattr(self.ui, i).setEnabled(False)
    else:
        for i in mw_opts:
            getattr(self.ui, i).setEnabled(True)
    self.filtered_rgear.setFilterStrings(items_to_filter_out)
    starting_gear = self.get_option_value("starting_gear")
    randomized_gear = self.get_option_value("randomized_gear")
    # Move each filtered item out of whichever gear list currently holds it,
    # then park all filtered items in randomized_gear so the pool stays complete.
    for item in items_to_filter_out:
        if item in randomized_gear:
            randomized_gear.remove(item)
        elif item in starting_gear:
            starting_gear.remove(item)
    randomized_gear += items_to_filter_out
    self.set_option_value("starting_gear", starting_gear)
    self.set_option_value("randomized_gear", randomized_gear)
    # Sanity check: starting + randomized gear must together equal the full inventory.
    compare = lambda x, y: collections.Counter(x) == collections.Counter(y)
    all_gear = self.get_option_value("starting_gear") + self.get_option_value("randomized_gear");
    if not compare(all_gear, INVENTORY_ITEMS):
        print("Gear list invalid, resetting")
        for opt in ["randomized_gear", "starting_gear"]:
            self.set_option_value(opt, self.default_settings[opt])
    for option_name in OPTIONS:
        widget = getattr(self.ui, option_name)
        label_for_option = getattr(self.ui, "label_for_" + option_name, None)
        if should_enable_options[option_name]:
            widget.setEnabled(True)
            if label_for_option:
                label_for_option.setEnabled(True)
        else:
            widget.setEnabled(False)
            # Disabled boolean options are also unchecked so they can't take effect.
            if isinstance(widget, QAbstractButton):
                widget.setChecked(False)
            if label_for_option:
                label_for_option.setEnabled(False)
    # Disable options that produce unbeatable seeds when not running from source.
    if not IS_RUNNING_FROM_SOURCE:
        for option_name in POTENTIALLY_UNBEATABLE_OPTIONS:
            if self.get_option_value(option_name):
                self.set_option_value(option_name, False)
                self.update_settings()
    # Hide certain options from the GUI (still accessible via settings.txt and permalinks).
    for option_name in HIDDEN_OPTIONS:
        widget = getattr(self.ui, option_name)
        if self.get_option_value(option_name):
            widget.show()
        else:
            widget.hide()
def disable_invalid_cosmetic_options(self):
    """Enable/disable the casual clothes checkbox based on the selected model's metadata."""
    custom_model_name = self.get_option_value("custom_player_model")
    metadata = customizer.get_model_metadata(custom_model_name)
    checkbox = self.ui.player_in_casual_clothes
    if metadata is None:
        # No metadata for this model: keep the checkbox usable but reset it.
        checkbox.setEnabled(True)
        self.set_option_value("player_in_casual_clothes", False)
        return
    if metadata.get("disable_casual_clothes", False):
        # The model explicitly forbids casual clothes.
        checkbox.setEnabled(False)
        checkbox.setChecked(False)
    else:
        checkbox.setEnabled(True)
def get_color(self, color_name):
    """Return the effective color for color_name given the selected preset.

    Resolution order: "Default" -> model defaults; "Custom" -> user colors with
    model defaults as fallback; otherwise a named preset with defaults as fallback.
    """
    preset_type = self.get_option_value("custom_color_preset")
    default_colors = self.get_default_custom_colors_for_current_model()
    if preset_type == "Default":
        return default_colors[color_name]
    if preset_type == "Custom":
        return self.custom_colors.get(color_name, default_colors[color_name])
    color_presets = self.get_color_presets_for_current_model()
    if preset_type not in color_presets:
        print("Could not find color preset \"%s\" in the model's metadata.txt" % preset_type)
        return default_colors[color_name]
    return color_presets[preset_type].get(color_name, default_colors[color_name])
def set_color(self,
    option_name, color, update_preview=True,
    save_color_as_custom=True, move_other_non_custom_colors_to_custom=True
):
    """Set the displayed (and optionally stored) color for one custom color option.

    option_name must be "custom_color_<name>". color is normalized to a 3-element
    [r, g, b] list; anything else (including None) falls back to white.
    When save_color_as_custom is True the color is recorded in self.custom_colors
    and, if a preset other than "Custom" is active, all other currently visible
    colors are copied into custom colors before switching the preset to "Custom"
    so they don't visibly jump.
    """
    # Normalize to a [r, g, b] list; invalid inputs (including None) become white.
    if isinstance(color, tuple):
        color = list(color)
    if not (isinstance(color, list) and len(color) == 3):
        color = [255, 255, 255]
    assert option_name.startswith("custom_color_")
    color_name = option_name[len("custom_color_"):]
    color_button = self.custom_color_selector_buttons[option_name]
    hex_input = self.custom_color_selector_hex_inputs[option_name]
    reset_button = self.custom_color_reset_buttons[option_name]
    # NOTE(review): the previous `if color is None:` branch here was unreachable —
    # the normalization above always converts None to white — so it was removed.
    hex_input.setText("%02X%02X%02X" % tuple(color))
    r, g, b = color
    # Depending on the value of the background color of the button, we need to make the text color either black or white for contrast.
    h, s, v = colorsys.rgb_to_hsv(r/255, g/255, b/255)
    if v > 0.5:
        text_color = (0, 0, 0)
    else:
        text_color = (255, 255, 255)
    color_button.setStyleSheet(
        "background-color: rgb(%d, %d, %d);" % (r, g, b) + \
        "color: rgb(%d, %d, %d);" % text_color,
    )
    default_colors = self.get_default_custom_colors_for_current_model()
    default_color = default_colors[color_name]
    # Only show the reset ("X") button when the color differs from the model default.
    if color == default_color:
        reset_button.setVisible(False)
    else:
        reset_button.setVisible(True)
    if save_color_as_custom:
        # First, save the color as a custom color.
        self.custom_colors[color_name] = color
        if self.get_option_value("custom_color_preset") != "Custom" and move_other_non_custom_colors_to_custom:
            # If the presets dropdown isn't already on Custom, we'll switch to to Custom automatically.
            # However, in order to prevent all the other colors besides this one from abruptly switching when we do that, we need to copy all of the currently visible default or preset colors (except this currently changing color) over to custom colors.
            for other_color_name in default_colors:
                if color_name == other_color_name:
                    continue
                color = self.get_color(other_color_name)
                other_option_name = "custom_color_" + other_color_name
                self.set_color(other_option_name, color, update_preview=False, save_color_as_custom=True, move_other_non_custom_colors_to_custom=False)
            # Then we actually switch the presets dropdown to Custom.
            self.set_option_value("custom_color_preset", "Custom")
    if update_preview:
        self.update_model_preview()
def open_custom_color_chooser(self):
    """Slot for a color selector button: open a color dialog and apply the user's choice."""
    option_name = self.sender().objectName()
    assert option_name.startswith("custom_color_")
    color_name = option_name[len("custom_color_"):]
    r, g, b = self.get_color(color_name)
    chosen_color = QColorDialog.getColor(QColor(r, g, b, 255), self, "Select color")
    if not chosen_color.isValid():
        # The user cancelled the dialog.
        return
    self.set_color(option_name, [chosen_color.red(), chosen_color.green(), chosen_color.blue()])
    self.update_settings()
def custom_color_hex_code_changed(self):
    """Parse the hex input field and apply it when it is a valid RRGGBB code.

    Returns True when the text was a valid color, False otherwise.
    """
    option_name, color_name = self.get_option_name_and_color_name_from_sender_object_name()
    text = self.sender().text().strip().lstrip("#").upper()
    if len(text) != 6:
        return False
    if not all(c in "0123456789ABCDEF" for c in text):
        return False
    rgb = [int(text[i:i+2], 16) for i in (0, 2, 4)]
    self.set_color(option_name, rgb)
    self.update_settings()
    return True
def custom_color_hex_code_finished_editing(self):
    """When the hex field loses focus, revert invalid text to the current color's hex code."""
    if self.custom_color_hex_code_changed():
        return
    # The typed hex code was invalid; restore the correct hex code for the current color.
    option_name, color_name = self.get_option_name_and_color_name_from_sender_object_name()
    self.set_color(option_name, self.get_color(color_name))
def reset_one_custom_color(self):
    """Slot for a color's "X" button: restore that single color to the model default."""
    option_name, color_name = self.get_option_name_and_color_name_from_sender_object_name()
    default_color = self.get_default_custom_colors_for_current_model()[color_name]
    if self.get_color(color_name) == default_color:
        # Already at the default; nothing to do.
        return
    self.set_color(option_name, default_color)
    self.update_settings()
def get_random_h_and_v_shifts_for_custom_color(self, default_color):
r, g, b = default_color
h, s, v = colorsys.rgb_to_hsv(r/255, g/255, b/255)
h = int(h*360)
s = int(s*100)
v = int(v*100)
min_v_shift = -40
max_v_shift = 40
if s < 10:
# For very unsaturated colors, we want to limit the range of value randomization to exclude results that wouldn't change anything anyway.
# This effectively stops white and black from having a 50% chance to not change at all.
min_v_shift = max(-40, 0-v)
max_v_shift = min(40, 100-v)
h_shift = random.randint(0, 359)
v_shift = random.randint(min_v_shift, max_v_shift)
return (h_shift, v_shift)
def randomize_one_custom_color(self):
option_name, color_name = self.get_option_name_and_color_name_from_sender_object_name()
default_colors = self.get_default_custom_colors_for_current_model()
default_color = default_colors[color_name]
h_shift, v_shift = self.get_random_h_and_v_shifts_for_custom_color(default_color)
color = texture_utils.hsv_shift_color(default_color, h_shift, v_shift)
self.set_color(option_name, color)
self.update_settings()
def randomize_all_custom_colors_together(self):
default_colors = self.get_default_custom_colors_for_current_model()
h_shift = random.randint(0, 359)
v_shift = random.randint(-40, 40)
for custom_color_name, default_color in default_colors.items():
color = texture_utils.hsv_shift_color(default_color, h_shift, v_shift)
option_name = "custom_color_" + custom_color_name
self.set_color(option_name, color, update_preview=False)
self.update_model_preview()
self.update_settings()
def randomize_all_custom_colors_separately(self):
default_colors = self.get_default_custom_colors_for_current_model()
for custom_color_name, default_color in default_colors.items():
h_shift, v_shift = self.get_random_h_and_v_shifts_for_custom_color(default_color)
color = texture_utils.hsv_shift_color(default_color, h_shift, v_shift)
option_name = "custom_color_" + custom_color_name
self.set_color(option_name, color, update_preview=False)
self.update_model_preview()
self.update_settings()
def get_option_name_and_color_name_from_sender_object_name(self):
object_name = self.sender().objectName()
if object_name.endswith("_hex_code_input"):
option_name = object_name[:len(object_name)-len("_hex_code_input")]
elif object_name.endswith("_randomize_color"):
option_name = object_name[:len(object_name)-len("_randomize_color")]
elif object_name.endswith("_reset_color"):
option_name = object_name[:len(object_name)-len("_reset_color")]
else:
raise Exception("Invalid custom color sender object name: %s" % object_name)
assert option_name.startswith("custom_color_")
color_name = option_name[len("custom_color_"):]
return (option_name, color_name)
def get_current_model_metadata_and_prefix(self):
custom_model_name = self.get_option_value("custom_player_model")
is_casual = self.get_option_value("player_in_casual_clothes")
if is_casual:
prefix = "casual"
else:
prefix = "hero"
metadata = customizer.get_model_metadata(custom_model_name)
if metadata is None:
return ({}, prefix)
return (metadata, prefix)
def get_default_custom_colors_for_current_model(self):
metadata, prefix = self.get_current_model_metadata_and_prefix()
default_colors = metadata.get(prefix + "_custom_colors", {})
return default_colors
def get_color_presets_for_current_model(self):
metadata, prefix = self.get_current_model_metadata_and_prefix()
color_presets = metadata.get(prefix + "_color_presets", {})
return color_presets
  def update_model_preview(self):
    """Regenerate and display the preview image for the selected player model."""
    if self.no_ui_test:
      # Preview can't be seen without a UI anyway, don't waste time generating it.
      return
    custom_model_name = self.get_option_value("custom_player_model")
    custom_model_metadata = customizer.get_model_metadata(custom_model_name)
    disable_casual_clothes = custom_model_metadata.get("disable_casual_clothes", False)
    # Some models forcibly disable casual clothes; for those, always preview the hero outfit.
    if self.get_option_value("player_in_casual_clothes") and not disable_casual_clothes:
      prefix = "casual"
    else:
      prefix = "hero"
    # Collect the currently selected value of every custom color this model defines.
    colors = OrderedDict()
    for color_name in self.get_default_custom_colors_for_current_model():
      colors[color_name] = self.get_color(color_name)
    try:
      preview_image = customizer.get_model_preview_image(custom_model_name, prefix, colors)
    except Exception as e:
      # Show the full traceback to the user; model files are user-supplied
      # and can fail to load in arbitrary ways.
      stack_trace = traceback.format_exc()
      error_message = "Failed to load model preview image for model %s.\nError:\n" % (custom_model_name) + str(e) + "\n\n" + stack_trace
      print(error_message)
      QMessageBox.critical(
        self, "Failed to load model preview",
        error_message
      )
      return
    if preview_image is None:
      # Model has no preview image; hide the preview widget entirely.
      self.ui.custom_model_preview_label.hide()
      return
    self.ui.custom_model_preview_label.show()
    # Convert the image to a QPixmap scaled to fit the preview label.
    # (preview_image appears to be a PIL image, judging by tobytes/width/height — TODO confirm.)
    data = preview_image.tobytes('raw', 'BGRA')
    qimage = QImage(data, preview_image.width, preview_image.height, QImage.Format_ARGB32)
    scaled_pixmap = QPixmap.fromImage(qimage).scaled(225, 350, Qt.KeepAspectRatio, Qt.SmoothTransformation)
    self.ui.custom_model_preview_label.setPixmap(scaled_pixmap)
  def install_custom_model_zip(self):
    """Prompt for a custom model zip (single model or model pack) and install it.
    
    A single-model archive has one top-level folder containing Link.arc and
    metadata.txt. A model pack instead has a top-level "models" folder with one
    such folder per model; pack contents are moved up to CUSTOM_MODELS_PATH.
    """
    try:
      zip_path, selected_filter = QFileDialog.getOpenFileName(self, "Select custom model zip file", CUSTOM_MODELS_PATH, "Zip Files (*.zip)")
      if not zip_path:
        # User cancelled the file dialog.
        return
      # NOTE(review): `zip` shadows the builtin and the ZipFile is never
      # explicitly closed — consider a `with` block; verify nothing relies on
      # the handle staying open.
      zip = zipfile.ZipFile(zip_path)
      try:
        # The first namelist entry is taken as the archive's top-level folder.
        top_level_dir = zipfile.Path(zip, zip.namelist()[0])
      except IndexError:
        QMessageBox.critical(
          self, "Incorrect archive structure",
          "Archive is empty"
        )
        return
      # Verify contents
      if top_level_dir.joinpath("models").is_dir():
        # Model pack: one subfolder per model inside "models".
        model_path = top_level_dir.joinpath("models")
        model_dir_list = list(model_path.iterdir())
        is_model_pack = True
      else:
        model_dir_list = [top_level_dir]
        is_model_pack = False
      # Every model folder must contain these two files to be valid.
      expected_files = ["Link.arc", "metadata.txt"]
      for model_dir in model_dir_list:
        for f in expected_files:
          if not model_dir.joinpath(f).exists():
            QMessageBox.critical(
              self, "Incorrect archive structure",
              "Missing file: %s" % model_dir.joinpath(f).at
            )
            return
      zip.extractall(CUSTOM_MODELS_PATH)
      if not is_model_pack:
        install_result = model_dir_list[0].name
      else:
        # Flatten the pack: move each model folder up to CUSTOM_MODELS_PATH,
        # then delete the now-empty extracted pack folder.
        for model_dir in model_dir_list:
          shutil.move(os.path.join(CUSTOM_MODELS_PATH, model_dir.at), os.path.join(CUSTOM_MODELS_PATH, model_dir.name))
        shutil.rmtree(os.path.join(CUSTOM_MODELS_PATH, top_level_dir.name))
        install_result = "%s models" % len(model_dir_list)
      QMessageBox.information(
        self, "Installation complete",
        "%s installed successfully" % install_result
      )
      # Refresh the dropdown and select the first newly installed model.
      self.update_custom_player_model_list()
      self.set_option_value("custom_player_model", model_dir_list[0].name)
    except zipfile.BadZipfile:
      stack_trace = traceback.format_exc()
      print(stack_trace)
      QMessageBox.critical(
        self, "Failed to unpack model archive",
        stack_trace
      )
  def open_about(self):
    """Show the About dialog with the version number and project links."""
    text = """Wind Waker Randomizer Version %s<br><br>
      Created by LagoLunatic<br><br>
      Report issues here:<br><a href=\"https://github.com/LagoLunatic/wwrando/issues\">https://github.com/LagoLunatic/wwrando/issues</a><br><br>
      Source code:<br><a href=\"https://github.com/LagoLunatic/wwrando\">https://github.com/LagoLunatic/wwrando</a>""" % VERSION
    # Stored on self, presumably so the non-modal dialog isn't garbage
    # collected while it is shown — TODO confirm.
    self.about_dialog = QMessageBox()
    # RichText so the <a> links above render as clickable hyperlinks.
    self.about_dialog.setTextFormat(Qt.TextFormat.RichText)
    self.about_dialog.setWindowTitle("Wind Waker Randomizer")
    self.about_dialog.setText(text)
    self.about_dialog.setWindowIcon(self.windowIcon())
    self.about_dialog.show()
  def keyPressEvent(self, event):
    """Qt override: close the window when Escape is pressed."""
    if event.key() == Qt.Key_Escape:
      self.close()
  def closeEvent(self, event):
    """Qt override: stop the update-checker thread before the window closes."""
    if not IS_RUNNING_FROM_SOURCE:
      # Need to wait for the update checker before exiting, or the program will crash when closing.
      self.update_checker_thread.quit()
      self.update_checker_thread.wait()
    event.accept()
class ModelFilterOut(QSortFilterProxyModel):
  """Proxy model that hides a limited number of occurrences of given strings.
  
  For each string in filter_strings, the first N matching rows of the source
  model are filtered out, where N is how many times that string appears in
  filter_strings (duplicates in the list hide multiple rows).
  """
  def __init__(self):
    super(ModelFilterOut, self).__init__()
    # Row strings to hide; may contain duplicates (see class docstring).
    self.filter_strings = []
  def setFilterStrings(self, fstr):
    """Replace the filtered strings and re-run the filter."""
    self.filter_strings = fstr
    self.invalidateFilter()
  def filterAcceptsRow(self, sourceRow, sourceParent):
    """Qt override: accept the row unless it is among the first N matches."""
    index0 = self.sourceModel().index(sourceRow, 0, sourceParent)
    data = self.sourceModel().data(index0)
    num_occurrences = self.filter_strings.count(data)
    # Subtract one for each earlier row with identical text, so that only the
    # first num_occurrences rows carrying this text end up hidden.
    for i in range(sourceRow):
      cur_index = self.sourceModel().index(i, 0, sourceParent)
      cur_data = self.sourceModel().data(cur_index)
      if cur_data == data:
        num_occurrences -= 1
    return num_occurrences <= 0
class RandomizerProgressDialog(QProgressDialog):
  """Modal, fixed-size, non-cancellable progress dialog shown while randomizing."""
  def __init__(self, title, description, max_val):
    QProgressDialog.__init__(self)
    self.setWindowTitle(title)
    self.setLabelText(description)
    self.setMaximum(max_val)
    # Block interaction with the rest of the application while running.
    self.setWindowModality(Qt.ApplicationModal)
    # Strip window decorations except the title bar (no close button).
    self.setWindowFlags(Qt.CustomizeWindowHint | Qt.WindowTitleHint)
    self.setFixedSize(self.size())
    # Don't auto-reset at maximum; the caller controls the dialog's lifetime.
    self.setAutoReset(False)
    # No cancel button is shown.
    self.setCancelButton(None)
    self.show()
class RandomizerThread(QThread):
  """Background thread that runs the randomization generator off the UI thread.
  
  Progress, completion and failure are reported back via Qt signals so the GUI
  can update a progress dialog without blocking.
  """
  # Emitted with (option description, progress value) as randomization advances.
  update_progress = Signal(str, int)
  randomization_complete = Signal()
  # Emitted with a human-readable error message (including traceback) on failure.
  randomization_failed = Signal(str)
  def __init__(self, randomizer, profiling=False):
    QThread.__init__(self)
    self.randomizer = randomizer
    # When True, cProfile the run and dump stats to profileresults.txt.
    self.profiling = profiling
  def run(self):
    """QThread override: drive the randomizer generator to completion."""
    if self.profiling:
      import cProfile, pstats
      profiler = cProfile.Profile()
      profiler.enable()
    try:
      randomizer_generator = self.randomizer.randomize()
      last_update_time = time.time()
      while True:
        # Need to use a while loop to go through the generator instead of a for loop, as a for loop would silently exit if a StopIteration error ever happened for any reason.
        next_option_description, options_finished = next(randomizer_generator)
        if options_finished == -1:
          # Sentinel progress value from the generator meaning "done".
          break
        if time.time()-last_update_time < 0.1:
          # Limit how frequently the signal is emitted to 10 times per second.
          # Extremely frequent updates (e.g. 1000 times per second) can cause the program to crash with no error message.
          continue
        self.update_progress.emit(next_option_description, options_finished)
        last_update_time = time.time()
    except Exception as e:
      stack_trace = traceback.format_exc()
      error_message = "Randomization failed with error:\n" + str(e) + "\n\n" + stack_trace
      self.randomization_failed.emit(error_message)
      return
    if self.profiling:
      profiler.disable()
      with open("profileresults.txt", "w") as f:
        ps = pstats.Stats(profiler, stream=f).sort_stats("cumulative")
        ps.print_stats()
    self.randomization_complete.emit()
class UpdateCheckerThread(QThread):
  """Background thread that checks for a newer release without blocking the UI."""
  # Emitted with the result of check_for_updates() when the check finishes.
  finished_checking_for_updates = Signal(str)
  def run(self):
    """QThread override: run the (potentially slow, network-bound) check."""
    new_version = check_for_updates()
    self.finished_checking_for_updates.emit(new_version)
# Allow yaml to load and dump OrderedDicts.
# Loading: construct every YAML mapping as an OrderedDict so key order in the
# file is preserved in memory.
yaml.SafeLoader.add_constructor(
  yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
  lambda loader, node: OrderedDict(loader.construct_pairs(node))
)
# Dumping: represent OrderedDicts as plain YAML mappings, in insertion order.
yaml.Dumper.add_representer(
  OrderedDict,
  lambda dumper, data: dumper.represent_dict(data.items())
)
| 40.407234 | 269 | 0.7104 |
a85f30668dda392629e51da821a6aea550b63fe2 | 26,110 | py | Python | couchdbkit/designer/fs.py | gelnior/couchdbkit | 8277d6ffd00553ae0b0b2368636460d40f8d8225 | [
"MIT"
] | null | null | null | couchdbkit/designer/fs.py | gelnior/couchdbkit | 8277d6ffd00553ae0b0b2368636460d40f8d8225 | [
"MIT"
] | null | null | null | couchdbkit/designer/fs.py | gelnior/couchdbkit | 8277d6ffd00553ae0b0b2368636460d40f8d8225 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -
#
# This file is part of couchdbkit released under the MIT license.
# See the NOTICE for more information.
import base64
import copy
from hashlib import md5
import logging
import mimetypes
import os
import os.path
import re
from .. import client
from ..exceptions import ResourceNotFound, DesignerError, \
BulkSaveError
from .macros import package_shows, package_views
from .. import utils
if os.name == 'nt':
    # On Windows, paths on disk use backslashes, while attachment names stored
    # in the document always use forward slashes, so convert between the two.
    def _replace_backslash(name):
        return name.replace("\\", "/")
    def _replace_slash(name):
        return name.replace("/", "\\")
else:
    # On POSIX systems the separator is already "/", so both are no-ops.
    def _replace_backslash(name):
        return name
    def _replace_slash(name):
        return name
logger = logging.getLogger(__name__)
class FSDoc(object):
    """A CouchDB document stored as a directory tree on the filesystem.
    
    Follows the couchapp directory conventions: regular files map to document
    fields (extension stripped), ``_attachments`` and ``vendor/*/_attachments``
    map to attachments, and a ``couchapp`` folder / ``couchapp.json`` file hold
    application metadata (manifest, signatures, object refs).
    """
    def __init__(self, path, create=False, docid=None, is_ddoc=True):
        # path: directory backing this document on disk.
        # docid: explicit document id; if falsy it is derived via get_id().
        # is_ddoc: when True the default id gets a "_design/" prefix.
        self.docdir = path
        self.ignores = []
        self.is_ddoc = is_ddoc
        ignorefile = os.path.join(path, '.couchappignore')
        if os.path.exists(ignorefile):
            # A .couchappignore file is a json file containing a
            # list of regexps for things to skip
            self.ignores = utils.json.load(open(ignorefile, 'r'))
        if not docid:
            docid = self.get_id()
        self.docid = docid
        self._doc = {'_id': self.docid}
        if create:
            self.create()
    def get_id(self):
        """
        if there is an _id file, docid is extracted from it,
        else we take the current folder name.
        """
        idfile = os.path.join(self.docdir, '_id')
        if os.path.exists(idfile):
            # Only the first line of the _id file is used.
            docid = utils.read_file(idfile).split("\n")[0].strip()
            if docid: return docid
        if self.is_ddoc:
            return "_design/%s" % os.path.split(self.docdir)[1]
        else:
            return os.path.split(self.docdir)[1]
    def __repr__(self):
        return "<%s (%s/%s)>" % (self.__class__.__name__, self.docdir, self.docid)
    def __str__(self):
        return utils.json.dumps(self.doc())
    def create(self):
        """Initialize the document directory with an empty .couchapprc file."""
        if not os.path.isdir(self.docdir):
            logger.error("%s directory doesn't exist." % self.docdir)
        rcfile = os.path.join(self.docdir, '.couchapprc')
        if not os.path.isfile(rcfile):
            utils.write_json(rcfile, {})
        else:
            logger.warning("CouchApp already initialized in %s." % self.docdir)
    def push(self, dbs, atomic=True, force=False):
        """Push a doc to a list of database `dburls`. If noatomic is true
        each attachments will be sent one by one."""
        for db in dbs:
            if atomic:
                # Single request: document with attachments inlined.
                doc = self.doc(db, force=force)
                db.save_doc(doc, force_update=True)
            else:
                # Save the document first, then upload each missing attachment
                # individually.
                doc = self.doc(db, with_attachments=False, force=force)
                db.save_doc(doc, force_update=True)
                attachments = doc.get('_attachments') or {}
                for name, filepath in self.attachments():
                    if name not in attachments:
                        logger.debug("attach %s " % name)
                        db.put_attachment(doc, open(filepath, "r"),
                                name=name)
            logger.debug("%s/%s had been pushed from %s" % (db.uri,
                self.docid, self.docdir))
    def attachment_stub(self, name, filepath):
        """Return an inline attachment stub (base64 data + content type) for a file."""
        att = {}
        with open(filepath, "rb") as f:
            # Strip whitespace from the base64 output before inlining it.
            # NOTE(review): b64encode returns bytes on Python 3, so re_sp.sub
            # with a str pattern would raise there — verify intended Python version.
            re_sp = re.compile('\s')
            att = {
                "data": re_sp.sub('',base64.b64encode(f.read())),
                "content_type": ';'.join([_f for _f in mimetypes.guess_type(name) if _f])
            }
        return att
    def doc(self, db=None, with_attachments=True, force=False):
        """Retrieve the document object from the
        document directory. If `with_attachments` is True
        attachments will be included and encoded"""
        manifest = []
        objects = {}
        signatures = {}
        attachments = {}
        self._doc = {'_id': self.docid}
        # get designdoc
        self._doc.update(self.dir_to_fields(self.docdir, manifest=manifest))
        if not 'couchapp' in self._doc:
            self._doc['couchapp'] = {}
        self.olddoc = {}
        if db is not None:
            # Fetch the currently stored version (if any) so we can reuse its
            # _rev and only re-send attachments whose signature changed.
            try:
                self.olddoc = db.open_doc(self._doc['_id'])
                attachments = self.olddoc.get('_attachments') or {}
                self._doc.update({'_rev': self.olddoc['_rev']})
            except ResourceNotFound:
                self.olddoc = {}
        if 'couchapp' in self.olddoc:
            old_signatures = self.olddoc['couchapp'].get('signatures',
                    {})
        else:
            old_signatures = {}
        for name, filepath in self.attachments():
            signatures[name] = utils.sign_file(filepath)
            if with_attachments and not old_signatures:
                # First push: inline every attachment.
                logger.debug("attach %s " % name)
                attachments[name] = self.attachment_stub(name, filepath)
        if old_signatures:
            # Drop attachments that were deleted locally or whose content changed.
            for name, signature in list(old_signatures.items()):
                cursign = signatures.get(name)
                if not cursign:
                    logger.debug("detach %s " % name)
                    del attachments[name]
                elif cursign != signature:
                    logger.debug("detach %s " % name)
                    del attachments[name]
                else:
                    continue
            if with_attachments:
                # Re-inline attachments that changed (or everything, if forced).
                for name, filepath in self.attachments():
                    if old_signatures.get(name) != signatures.get(name) or force:
                        logger.debug("attach %s " % name)
                        attachments[name] = self.attachment_stub(name, filepath)
        self._doc['_attachments'] = attachments
        self._doc['couchapp'].update({
            'manifest': manifest,
            'objects': objects,
            'signatures': signatures
        })
        if self.docid.startswith('_design/'): # process macros
            for funs in ['shows', 'lists', 'updates', 'filters',
                    'spatial']:
                if funs in self._doc:
                    package_shows(self._doc, self._doc[funs], self.docdir,
                            objects)
            if 'validate_doc_update' in self._doc:
                tmp_dict = dict(validate_doc_update=self._doc[
                    "validate_doc_update"])
                package_shows( self._doc, tmp_dict, self.docdir,
                        objects)
                self._doc.update(tmp_dict)
            if 'views' in self._doc:
                # clean views
                # we remove empty views and malformed from the list
                # of pushed views. We also clean manifest
                views = {}
                dmanifest = {}
                for i, fname in enumerate(manifest):
                    if fname.startswith("views/") and fname != "views/":
                        name, ext = os.path.splitext(fname)
                        if name.endswith('/'):
                            name = name[:-1]
                        dmanifest[name] = i
                for vname, value in self._doc['views'].items():
                    if value and isinstance(value, dict):
                        views[vname] = value
                    else:
                        del manifest[dmanifest["views/%s" % vname]]
                self._doc['views'] = views
                package_views(self._doc,self._doc["views"], self.docdir,
                        objects)
            if "fulltext" in self._doc:
                package_views(self._doc,self._doc["fulltext"], self.docdir,
                        objects)
        return self._doc
    def check_ignore(self, item):
        """Return True if `item` matches one of the .couchappignore regexps."""
        for i in self.ignores:
            match = re.match(i, item)
            if match:
                logger.debug("ignoring %s" % item)
                return True
        return False
    def dir_to_fields(self, current_dir='', depth=0,
            manifest=[]):
        """ process a directory and get all members """
        # NOTE(review): `manifest=[]` is a mutable default argument shared
        # across calls if omitted; all callers in this module pass it explicitly.
        fields={}
        if not current_dir:
            current_dir = self.docdir
        for name in os.listdir(current_dir):
            current_path = os.path.join(current_dir, name)
            rel_path = _replace_backslash(utils.relpath(current_path, self.docdir))
            if name.startswith("."):
                continue
            elif self.check_ignore(name):
                continue
            elif depth == 0 and name.startswith('_'):
                # files starting with "_" are always "special"
                continue
            elif name == '_attachments':
                continue
            elif depth == 0 and (name == 'couchapp' or name == 'couchapp.json'):
                # we are in app_meta
                if name == "couchapp":
                    manifest.append('%s/' % rel_path)
                    content = self.dir_to_fields(current_path,
                            depth=depth+1, manifest=manifest)
                else:
                    manifest.append(rel_path)
                    content = utils.read_json(current_path)
                    if not isinstance(content, dict):
                        content = { "meta": content }
                # These keys are regenerated on every push; never take them
                # from the on-disk metadata.
                if 'signatures' in content:
                    del content['signatures']
                if 'manifest' in content:
                    del content['manifest']
                if 'objects' in content:
                    del content['objects']
                if 'length' in content:
                    del content['length']
                if 'couchapp' in fields:
                    fields['couchapp'].update(content)
                else:
                    fields['couchapp'] = content
            elif os.path.isdir(current_path):
                manifest.append('%s/' % rel_path)
                fields[name] = self.dir_to_fields(current_path,
                        depth=depth+1, manifest=manifest)
            else:
                logger.debug("push %s" % rel_path)
                content = ''
                if name.endswith('.json'):
                    try:
                        content = utils.read_json(current_path)
                    except ValueError:
                        logger.error("Json invalid in %s" % current_path)
                else:
                    try:
                        content = utils.read_file(current_path).strip()
                    except UnicodeDecodeError:
                        # Fall back to raw bytes, then base64, for non-utf8 files.
                        logger.warning("%s isn't encoded in utf8" % current_path)
                        content = utils.read_file(current_path, utf8=False)
                        try:
                            content.encode('utf-8')
                        except UnicodeError:
                            logger.warning(
                                "plan B didn't work, %s is a binary" % current_path)
                            logger.warning("use plan C: encode to base64")
                            content = "base64-encoded;%s" % base64.b64encode(
                                content)
                # remove extension
                name, ext = os.path.splitext(name)
                if name in fields:
                    logger.warning(
                        "%(name)s is already in properties. Can't add (%(fqn)s)" % {
                            "name": name, "fqn": rel_path })
                else:
                    manifest.append(rel_path)
                    fields[name] = content
        return fields
    def _process_attachments(self, path, vendor=None):
        """ the function processing directory to yeld
        attachments. """
        if os.path.isdir(path):
            for root, dirs, files in os.walk(path):
                # Prune hidden/ignored directories so os.walk skips them.
                # NOTE(review): removing from `dirs` while iterating it can skip
                # adjacent entries; the usual idiom is dirs[:] = [...] — verify.
                for dirname in dirs:
                    if dirname.startswith('.'):
                        dirs.remove(dirname)
                    elif self.check_ignore(dirname):
                        dirs.remove(dirname)
                if files:
                    for filename in files:
                        if filename.startswith('.'):
                            continue
                        elif self.check_ignore(filename):
                            continue
                        else:
                            filepath = os.path.join(root, filename)
                            name = utils.relpath(filepath, path)
                            if vendor is not None:
                                # Vendored attachments live under vendor/<name>/.
                                name = os.path.join('vendor', vendor, name)
                            name = _replace_backslash(name)
                            yield (name, filepath)
    def attachments(self):
        """ This function yield a tuple (name, filepath) corresponding
        to each attachment (vendor included) in the couchapp. `name`
        is the name of attachment in `_attachments` member and `filepath`
        the path to the attachment on the disk.
        attachments are processed later to allow us to send attachments inline
        or one by one.
        """
        # process main attachments
        attachdir = os.path.join(self.docdir, "_attachments")
        for attachment in self._process_attachments(attachdir):
            yield attachment
        vendordir = os.path.join(self.docdir, 'vendor')
        if not os.path.isdir(vendordir):
            logger.debug("%s don't exist" % vendordir)
            return
        # Each vendor subfolder may contribute its own _attachments directory.
        for name in os.listdir(vendordir):
            current_path = os.path.join(vendordir, name)
            if os.path.isdir(current_path):
                attachdir = os.path.join(current_path, '_attachments')
                if os.path.isdir(attachdir):
                    for attachment in self._process_attachments(attachdir,
                            vendor=name):
                        yield attachment
    def index(self, dburl, index):
        """Return the URL of the app's index page, or False if there is none."""
        if index is not None:
            return "%s/%s/%s" % (dburl, self.docid, index)
        elif os.path.isfile(os.path.join(self.docdir, "_attachments",
                'index.html')):
            return "%s/%s/index.html" % (dburl, self.docid)
        return False
def document(path, create=False, docid=None, is_ddoc=True):
    """Convenience wrapper: build an FSDoc for the document stored at *path*."""
    doc = FSDoc(path, create=create, docid=docid, is_ddoc=is_ddoc)
    return doc
def push(path, dbs, atomic=True, force=False, docid=None):
    """Push a document from the filesystem to one or more databases.

    Equivalent to the ``couchapp push`` command. If a ``_docs`` subfolder
    exists inside the document directory, its contents are pushed as well.
    """
    if not isinstance(dbs, (list, tuple)):
        # Allow a single database to be passed directly.
        dbs = [dbs]
    document(path, create=False, docid=docid).push(dbs, atomic=atomic, force=force)
    docspath = os.path.join(path, '_docs')
    if os.path.exists(docspath):
        pushdocs(docspath, dbs, atomic=atomic)
def pushapps(path, dbs, atomic=True, export=False, couchapprc=False):
    """Push all couchapps found in one folder, like the couchapp pushapps command.

    :param path: folder containing one subdirectory per couchapp.
    :param dbs: a database instance or a list of them.
    :param atomic: when True, all apps are sent with bulk saves; otherwise
        each app is pushed (docs then attachments) one by one.
    :param export: when True nothing is pushed; returns ``{'docs': [...]}``.
    :param couchapprc: when True, only directories containing a .couchapprc
        file are treated as couchapps.
    """
    if not isinstance(dbs, (list, tuple)):
        dbs = [dbs]
    apps = []
    for d in os.listdir(path):
        appdir = os.path.join(path, d)
        if os.path.isdir(appdir):
            if couchapprc and not os.path.isfile(os.path.join(appdir,
                                '.couchapprc')):
                continue
            doc = document(appdir)
            if not atomic:
                doc.push(dbs, atomic=False)
            else:
                apps.append(doc)
    if apps:
        if export:
            docs = [doc.doc() for doc in apps]
            return {'docs': docs}
        for db in dbs:
            # (Removed a redundant `docs = []` that was immediately overwritten.)
            docs = [doc.doc(db) for doc in apps]
            try:
                db.save_docs(docs)
            except BulkSaveError as e:
                # Conflict resolution: refresh each failed doc's _rev and retry once.
                docs1 = []
                for doc in e.errors:
                    try:
                        doc['_rev'] = db.get_rev(doc['_id'])
                        docs1.append(doc)
                    except ResourceNotFound:
                        pass
                if docs1:
                    db.save_docs(docs1)
def pushdocs(path, dbs, atomic=True, export=False):
    """Push multiple documents stored under ``path``.

    JSON files become documents directly (their basename is the default
    ``_id``); subdirectories are treated as filesystem documents (non-design
    docs). Hidden entries (names starting with ".") are skipped.

    :param atomic: when True the documents are sent with a single bulk save.
    :param export: when True nothing is pushed; returns ``{'docs': [...]}``.
    """
    if not isinstance(dbs, (list, tuple)):
        dbs = [dbs]
    docs = []
    for d in os.listdir(path):
        docdir = os.path.join(path, d)
        if d.startswith('.'):
            # Fixed: previously tested `docdir.startswith('.')`, which checked
            # the joined path rather than the entry name, so hidden files and
            # folders were never actually skipped.
            continue
        elif os.path.isfile(docdir):
            if d.endswith(".json"):
                doc = utils.read_json(docdir)
                docid, ext = os.path.splitext(d)
                doc.setdefault('_id', docid)
                doc.setdefault('couchapp', {})
                if not atomic:
                    for db in dbs:
                        db.save_doc(doc, force_update=True)
                else:
                    docs.append(doc)
        else:
            doc = document(docdir, is_ddoc=False)
            if not atomic:
                doc.push(dbs, atomic=False)
            else:
                docs.append(doc)
    if not docs:
        return
    if export:
        docs1 = []
        for doc in docs:
            if hasattr(doc, 'doc'):
                # FSDoc instances get serialized; plain dicts pass through.
                docs1.append(doc.doc())
            else:
                docs1.append(doc)
        return {'docs': docs1}
    for db in dbs:
        docs1 = []
        for doc in docs:
            if hasattr(doc, 'doc'):
                docs1.append(doc.doc(db))
            else:
                newdoc = doc.copy()
                try:
                    rev = db.get_rev(doc['_id'])
                    newdoc.update({'_rev': rev})
                except ResourceNotFound:
                    pass
                docs1.append(newdoc)
        try:
            db.save_docs(docs1)
        except BulkSaveError as e:
            # resolve conflicts: refresh _rev on each failed doc and retry once.
            docs1 = []
            for doc in e.errors:
                try:
                    doc['_rev'] = db.get_rev(doc['_id'])
                    docs1.append(doc)
                except ResourceNotFound:
                    pass
            if docs1:
                db.save_docs(docs1)
def clone(db, docid, dest=None, rev=None):
    """Clone a CouchDB document to the filesystem.

    Recreates the couchapp directory layout from the document's ``couchapp``
    metadata (manifest / signatures / objects), writes any remaining fields in
    a second pass, and downloads attachments whose signature changed.

    :param db: Database instance the document is fetched from.
    :param docid: id of the document to clone.
    :param dest: destination directory (defaults to the docid).
    :param rev: optional revision to fetch instead of the latest.
    """
    if not dest:
        dest = docid
    path = os.path.normpath(os.path.join(os.getcwd(), dest))
    if not os.path.exists(path):
        os.makedirs(path)
    if not rev:
        doc = db.open_doc(docid)
    else:
        doc = db.open_doc(docid, rev=rev)
    docid = doc['_id']
    metadata = doc.get('couchapp', {})
    # get manifest
    manifest = metadata.get('manifest', {})
    # get signatures
    signatures = metadata.get('signatures', {})
    # get objects refs
    objects = metadata.get('objects', {})
    # create files from manifest
    if manifest:
        for filename in manifest:
            logger.debug("clone property: %s" % filename)
            filepath = os.path.join(path, filename)
            if filename.endswith('/'):
                if not os.path.isdir(filepath):
                    os.makedirs(filepath)
            elif filename == "couchapp.json":
                # Written separately from app_meta in the second pass below.
                continue
            else:
                # Walk down into the doc following the path components of the
                # manifest entry to find the field value it corresponds to.
                parts = utils.split_path(filename)
                fname = parts.pop()
                v = doc
                while 1:
                    try:
                        for key in parts:
                            v = v[key]
                    except KeyError:
                        break
                    # remove extension
                    last_key, ext = os.path.splitext(fname)
                    # make sure key exist
                    try:
                        content = v[last_key]
                    except KeyError:
                        break
                    if isinstance(content, str):
                        # Resolve shared-object refs, then decode inline base64.
                        _ref = md5(utils.to_bytestring(content)).hexdigest()
                        if objects and _ref in objects:
                            content = objects[_ref]
                        if content.startswith('base64-encoded;'):
                            content = base64.b64decode(content[15:])
                    if fname.endswith('.json'):
                        content = utils.json.dumps(content).encode('utf-8')
                    del v[last_key]
                    # make sure file dir have been created
                    filedir = os.path.dirname(filepath)
                    if not os.path.isdir(filedir):
                        os.makedirs(filedir)
                    utils.write_content(filepath, content)
                    # remove the key from design doc
                    temp = doc
                    for key2 in parts:
                        if key2 == key:
                            if not temp[key2]:
                                del temp[key2]
                            break
                        temp = temp[key2]
    # second pass for missing key or in case
    # manifest isn't in app
    for key in doc.keys():
        if key.startswith('_'):
            continue
        elif key == 'couchapp':
            # Fixed: was `key in ('couchapp')` — a substring test on a plain
            # string (the parentheses don't make a tuple), which also matched
            # keys like "couch" or "app". Equality is what was intended.
            app_meta = copy.deepcopy(doc['couchapp'])
            # These keys are regenerated on every push; don't persist them.
            if 'signatures' in app_meta:
                del app_meta['signatures']
            if 'manifest' in app_meta:
                del app_meta['manifest']
            if 'objects' in app_meta:
                del app_meta['objects']
            if 'length' in app_meta:
                del app_meta['length']
            if app_meta:
                couchapp_file = os.path.join(path, 'couchapp.json')
                utils.write_json(couchapp_file, app_meta)
        elif key == 'views':
            # Fixed: was `key in ('views')` — same substring-test bug as above
            # (e.g. the key "view" would have matched).
            vs_dir = os.path.join(path, key)
            if not os.path.isdir(vs_dir):
                os.makedirs(vs_dir)
            for vsname, vs_item in doc[key].items():
                vs_item_dir = os.path.join(vs_dir, vsname)
                if not os.path.isdir(vs_item_dir):
                    os.makedirs(vs_item_dir)
                for func_name, func in vs_item.items():
                    filename = os.path.join(vs_item_dir, '%s.js' %
                            func_name)
                    utils.write_content(filename, func)
                    logger.warning("clone view not in manifest: %s" % filename)
        elif key in ('shows', 'lists', 'filter', 'update'):
            showpath = os.path.join(path, key)
            if not os.path.isdir(showpath):
                os.makedirs(showpath)
            for func_name, func in doc[key].items():
                filename = os.path.join(showpath, '%s.js' %
                        func_name)
                utils.write_content(filename, func)
                logger.warning(
                    "clone show or list not in manifest: %s" % filename)
        else:
            filedir = os.path.join(path, key)
            if os.path.exists(filedir):
                continue
            else:
                logger.warning("clone property not in manifest: %s" % key)
            if isinstance(doc[key], (list, tuple)):
                utils.write_json(filedir + ".json", doc[key])
            elif isinstance(doc[key], dict):
                if not os.path.isdir(filedir):
                    os.makedirs(filedir)
                for field, value in doc[key].items():
                    fieldpath = os.path.join(filedir, field)
                    if isinstance(value, str):
                        if value.startswith('base64-encoded;'):
                            # Fixed: previously decoded the stale `content`
                            # variable left over from the manifest loop instead
                            # of this field's own `value`.
                            value = base64.b64decode(value[15:])
                        utils.write_content(fieldpath, value)
                    else:
                        utils.write_json(fieldpath + '.json', value)
            else:
                value = doc[key]
                if not isinstance(value, str):
                    value = str(value)
                utils.write_content(filedir, value)
    # save id
    idfile = os.path.join(path, '_id')
    utils.write_content(idfile, doc['_id'])
    utils.write_json(os.path.join(path, '.couchapprc'), {})
    if '_attachments' in doc:  # process attachments
        attachdir = os.path.join(path, '_attachments')
        if not os.path.isdir(attachdir):
            os.makedirs(attachdir)
        for filename in doc['_attachments'].keys():
            if filename.startswith('vendor'):
                # vendor/<name>/... attachments go back under the vendor tree.
                attach_parts = utils.split_path(filename)
                vendor_attachdir = os.path.join(path, attach_parts.pop(0),
                        attach_parts.pop(0), '_attachments')
                filepath = os.path.join(vendor_attachdir, *attach_parts)
            else:
                filepath = os.path.join(attachdir, filename)
            filepath = _replace_slash(filepath)
            currentdir = os.path.dirname(filepath)
            if not os.path.isdir(currentdir):
                os.makedirs(currentdir)
            # Only re-download attachments whose stored signature no longer
            # matches the file already on disk.
            if signatures.get(filename) != utils.sign_file(filepath):
                stream = db.fetch_attachment(docid, filename, stream=True)
                with open(filepath, 'wb') as f:
                    for chunk in stream:
                        f.write(chunk)
                logger.debug("clone attachment: %s" % filename)
    logger.debug("%s/%s cloned in %s" % (db.uri, docid, dest))
def clone_design_doc(source, dest, rev=None):
    """Clone a design document from its URL, like couchapp does.

    ``source`` must look like ``http://host/dbname/_design/docid``.
    """
    parts = source.split('_design/')
    if len(parts) != 2:
        raise DesignerError("%s isn't a valid source" % source)
    dburl, docid = parts
    # Drop the trailing "/" left on the database URL by the split.
    db = client.Database(dburl[:-1], create=False)
    clone(db, docid, dest, rev=rev)
| 37.140825 | 93 | 0.490578 |
954c87400d2067376056253351a50270720c2684 | 17,710 | py | Python | 20211206/solveMDP_richLow.py | dongxulee/lifeCycleRefine | 6ca9670dea50150aabe31f86578323cec0ab018c | [
"MIT"
] | null | null | null | 20211206/solveMDP_richLow.py | dongxulee/lifeCycleRefine | 6ca9670dea50150aabe31f86578323cec0ab018c | [
"MIT"
] | null | null | null | 20211206/solveMDP_richLow.py | dongxulee/lifeCycleRefine | 6ca9670dea50150aabe31f86578323cec0ab018c | [
"MIT"
] | null | null | null | import numpy as np
import jax.numpy as jnp
from jax.numpy import interp
from jax import jit, partial, random, vmap
from tqdm import tqdm
import pandas as pd
import warnings
import os.path
warnings.filterwarnings("ignore")
np.printoptions(precision=2)
AgentType = ["richLow"]
Beta_r = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.10]
for beta_r in Beta_r:
for agentType in AgentType:
'''
Constants
'''
# discounting factor
beta = 1/(1+beta_r)
# utility function parameter
gamma = 3.0
print("agentType: ", agentType)
print("beta: ", beta)
print("gamma: ", gamma)
# time line, starts at 20 ends at 80
T_min = 0
T_max = 60
T_R = 45
# relative importance of housing consumption and non durable consumption
alpha = 0.95
# parameter used to calculate the housing consumption
kappa = 0.3
# uB associated parameter
B = 2.0
# social welfare after the unemployment
welfare = 20
# tax rate before and after retirement
tau_L = 0.2
tau_R = 0.1
# number of states S
nS = 8
# number of states e
nE = 2
# housing state
nO = 2
# experience state
nZ = 2
'''
Economic state calibration
'''
# probability of survival
Pa = jnp.array(np.load("constant/prob.npy"))
############################################################################################################ skill and finance literacy
if "rich" in agentType:
# deterministic income
detEarning = jnp.array(np.load("constant/highIncomeDetermined.npy"))
else:
detEarning = jnp.array(np.load("constant/lowIncomeDetermined.npy"))
if "High" in agentType:
# stock transaction fee
Kc = 0.02
# stock participation cost
c_k = 20
else:
# stock transaction fee
Kc = 0
# stock participation cost
c_k = 5
############################################################################################################
# Define transition matrix of economical states S
Ps = np.genfromtxt('constant/Ps.csv',delimiter=',')
Ps = jnp.array(Ps)
# -----------------------------------------------------------------------------
# Model calibration (top-level script code).
# NOTE(review): names used here but defined earlier in this file include
# np, jnp, pd, nS, nE, nO, nZ, Ps, Pa, T_min, T_max, T_R, detEarning, kappa,
# tau_L, tau_R, c_k, Kc, welfare, agentType, beta, beta_r, gamma -- confirm
# against the full file before relying on this summary.
# -----------------------------------------------------------------------------
# The possible GDP growth, stock return, bond return
gkfe = np.genfromtxt('constant/gkfe.csv',delimiter=',')
gkfe = jnp.array(gkfe)
# GDP growth depending on current S state
gGDP = gkfe[:,0]/100
# risk free interest rate depending on current S state
r_b = gkfe[:,1]/100
# stock return depending on current S state
r_k = gkfe[:,2]/100
# unemployment rate depending on current S state
Pe = gkfe[:,7:]/100
# reverse the column order -- presumably so that column index matches the
# employment coding used below (0 = unemployed, 1 = employed); TODO confirm
# against the layout of constant/gkfe.csv
Pe = Pe[:,::-1]
'''
Real Econ Shock calibration
'''
# # empirical econ
# empiricalEcon = pd.read_csv('constant/empiricalEcon.csv',delimiter=',')
# empiricalEcon = empiricalEcon.set_index("year")
# empiricalEcon = empiricalEcon/100
# # match the empirical states in memoryState
# memoryState = np.column_stack((gGDP, r_k, r_b))
# def similarity(actualState, memoryState = memoryState):
#     '''
#         state is charactorized as 3 dim vector
#     '''
#     diffState = np.sum(np.abs(actualState - memoryState), axis = 1)
#     distance = np.min(diffState)
#     state = np.argmin(diffState)
#     return distance, state
# similarity, imaginedEconState = np.vectorize(similarity, signature='(n)->(),()')(empiricalEcon.values)
# # generate economic states of a certain time window
# def generateEcon(yearBegin, yearCount,imaginedEconState,empiricalEcon):
#     # single economy generation
#     years = empiricalEcon.index.values
#     econ = jnp.array(imaginedEconState[np.where(years == yearBegin)[0][0]:np.where(years == yearBegin)[0][0]+yearCount],dtype = int)
#     econRate = empiricalEcon[np.where(years == yearBegin)[0][0]:np.where(years == yearBegin)[0][0]+yearCount].values
#     return econ, econRate
# #**********************************simulation change*****************************************************#
# yearBegin = 1999
# yearCount = 20
# econ, econRate = generateEcon(yearBegin, yearCount,imaginedEconState,empiricalEcon)
'''
calculate stationary distribution to prepare for simulation
'''
# calculate the stationary distribution of econ state and employment state
# Power iteration: 100 applications of the transition matrix Ps approximate
# the stationary distribution over the nS aggregate states.
S_distribution = jnp.ones(nS)/nS
for _ in range(100):
    S_distribution = jnp.matmul(S_distribution, Ps)
#P(0,1)
P01 = jnp.dot(Pe[:,0],S_distribution)
#P(1,0)
P10 = jnp.dot(Pe[:,1],S_distribution)
# NOTE(review): the next line builds the 2x2 employment transition matrix but
# discards the result -- it has no effect (the matrix is rebuilt inside the
# loop below).
jnp.array([[1-P01, P01],[P10, 1-P10]])
E_distribution = jnp.ones(2)/2
for _ in range(100):
    E_distribution = jnp.matmul(E_distribution, jnp.array([[1-P01, P01],[P10, 1-P10]]))
'''
401k related constants
'''
# 401k amount growth rate
r_bar = 0.02
# income fraction goes into 401k
yi = 0.04
Pa = Pa[:T_max]
# Nt[t]: expected remaining periods alive from age t (sum of survival probs)
Nt = [np.sum(Pa[t:]) for t in range(T_min,T_max)]
# factor used to calculate the withdraw amount
Dn = [(r_bar*(1+r_bar)**N)/((1+r_bar)**N - 1) for N in Nt]
Dn[-1] = 1
Dn = jnp.array(Dn)
# cash accumulated before retirement
nEarning = yi*E_distribution[1]*(1+jnp.dot(S_distribution,gGDP))*detEarning[:45]
n_balance = np.zeros(T_R)
for t in range(T_R):
    # compound each past year's contribution forward to age t at rate r_bar
    nMultiplier = jnp.array([(1+r_bar)**(t-i) for i in range(t)])
    n_balance[t] = (nEarning[:t] * nMultiplier).sum()
# cash payouts after retirement
n_payout = []
amount = n_balance[-1]
for t in range(45, 60):
    n_payout.append(amount*Dn[t])
    amount = amount - amount*Dn[t]
    n_balance = jnp.append(n_balance,amount)
n_payout = jnp.array(n_payout)
'''
housing related constants
'''
# variable associated with housing and mortgage
# age limit of buying a house
ageLimit = 30
mortgageLength = 30
# mortgage rate
rh = 0.045
# housing unit
H = 1000
# max rent unit
Rl = 500
# housing price constant
pt = 2*250/1000
# 30k rent 1000 sf
pr = 2*10/1000 * 2
# constant cost
c_h = 5
c_s = H*pt*0.4
# Dm is used to update the mortgage payment
Dm = [(1+rh) - rh*(1+rh)**(T_max - t)/((1+rh)**(T_max-t)-1) for t in range(T_min, T_max)]
Dm[-1] = 0
Dm = jnp.array(Dm)
# 30 year mortgage
# Ms[i]: remaining mortgage principal i years after origination; m is the
# constant annual payment for a loan of 80% of the house value.
Ms = []
M = H*pt*0.8
m = M*(1+rh) - Dm[30]*M
for i in range(30, T_max):
    Ms.append(M)
    M = M*(1+rh) - m
Ms.append(0)
Ms = jnp.array(Ms)
'''
Discretize the state space
Discretize the action space
'''
# actions dicretization(hp, cp, kp)
numGrid = 20
As = np.array(np.meshgrid(np.linspace(0.001,0.999,numGrid), np.linspace(0,1,numGrid), [0,1])).T.reshape(-1,3)
As = jnp.array(As)
# wealth discretization
# non-uniform grid: dense near zero, sparse near wealthLevel (power spacing)
wealthLevel = 300
polynomialDegree = 2
ws = jnp.linspace(0, np.power(wealthLevel,1/polynomialDegree), numGrid)**polynomialDegree
# age of last time bought a house value only count when o = 1.
aBuy = np.array(range(ageLimit))
# dimentions of the state
dim = (ws.size, aBuy.size, nS, nE, nO, nZ)
dimSize = len(dim)
# full cartesian product of the discretized state space, shaped dim + (6,)
xgrid = np.array([[w,ab,s,e,o,z] for w in ws
                            for ab in aBuy
                            for s in range(nS)
                            for e in range(nE)
                            for o in range(nO)
                            for z in range(nZ)]).reshape(dim + (dimSize,))
Xs = xgrid.reshape((np.prod(dim),dimSize))
Xs = jnp.array(Xs)
# value function storage, one slice per model period
Vgrid = np.zeros(dim + (T_max,))
# start of function definitions
nX = Xs.shape[0]
nA = As.shape[0]
'''
Functions Definitions
'''
# GDP growth depending on current S state
# gGDP = jnp.array(econRate[:,0])
#Define the earning function, which applies for both employment status and 8 econ states
@partial(jit, static_argnums=(0,))
def y(t, x):
    '''
    Pre-tax income at age t in state x.

    x = [w,ab,s,e,o,z]
    x = [0,1, 2,3,4,5]

    Working age (t < T_R): employed (x[3] == 1) earn the deterministic age
    profile scaled by the GDP growth of the current aggregate state x[2];
    unemployed receive `welfare`.  After retirement the last profile value
    is paid regardless of state.
    '''
    # t is a static argument under jit, so this Python `if` is resolved at
    # trace time rather than traced as a lax.cond.
    if t < T_R:
        # if os.path.exists("richLow.npy"):
        #     return detEarning[t] * (1+gGDP[t]) * x[3] + (1-x[3]) * welfare
        # else:
        # x[2] is cast to int8 to index gGDP; assumes nS <= 127 -- TODO confirm
        return detEarning[t] * (1+gGDP[jnp.array(x[2], dtype = jnp.int8)]) * x[3] + (1-x[3]) * welfare
    else:
        return detEarning[-1]
#Earning after tax and fixed by transaction in and out from 401k account
@partial(jit, static_argnums=(0,))
def yAT(t,x):
    '''
    After-tax income at age t in state x, net of 401k flows.

    Before retirement, the employed pay labour tax tau_L on the share of
    income left after the 401k contribution yi; the unemployed keep their
    (welfare) income untaxed.  After retirement, income is taxed at tau_R
    and the scheduled 401k payout is added.
    '''
    gross = y(t, x)
    if t >= T_R:
        # retired: taxed income plus the 401k withdrawal for this period
        return (1-tau_R)*gross + n_payout[t-T_R]
    # working age: blend employed / unemployed income by the employment flag x[3]
    taxedNet = (1-tau_L)*(gross * (1-yi))
    return taxedNet*x[3] + (1-x[3])*gross
#Define the utility function
@jit
def u(c):
    '''CRRA consumption utility; NaN (e.g. from c <= 0) maps to -inf.'''
    crra = (jnp.power(c, 1-gamma) - 1)/(1 - gamma)
    return jnp.nan_to_num(x = crra, nan = -jnp.inf)
#Define the bequeath function, which is a function of bequeath wealth
@jit
def uB(tb):
    '''Bequest utility: consumption utility of terminal wealth tb, scaled by B.'''
    baseUtility = u(tb)
    return B*baseUtility
#Reward function depends on the housing and non-housing consumption
@jit
def R(a):
    '''
    Per-period reward for each candidate action row.

    Input:
        a = [c,b,k,h,action]
        a = [0,1,2,3,4]

    Returns Cobb-Douglas utility of consumption c and housing h.  The final
    term is a feasibility mask: the boolean product is 1 for feasible rows
    (giving -1/1 + 1 = 0) and 0 for infeasible rows, where -1/0 evaluates to
    -inf under jnp, so infeasible actions get reward -inf.
    '''
    c = a[:,0]
    b = a[:,1]
    k = a[:,2]
    h = a[:,3]
    C = jnp.power(c, alpha) * jnp.power(h, 1-alpha)
    return u(C) + (-1/((c > 0) * (b >= 0) * (k >= 0) * (h > 0)) + 1)
# pc*qc / (ph*qh) = alpha/(1-alpha)
@partial(jit, static_argnums=(0,))
def feasibleActions(t, x):
    '''
    Build the (nA, 5) matrix of candidate actions for age t and state x.

    x = [w,ab,s,e,o,z]
    x = [0,1, 2,3,4,5]
    a = [c,b,k,h,action]
    a = [0,1,2,3,4]

    The owner branch decides consumption/bond/stock splits plus a forced
    ("fire") sale when income cannot cover the mortgage payment; the renter
    branch decides the same splits plus an optional house purchase.  The two
    branches are blended by the ownership flag x[4].
    '''
    # after-tax income is state-dependent but action-independent: compute once
    # (the original re-evaluated yAT(t,x) four times)
    income = yAT(t, x)
    # owner
    ab = jnp.array(x[1], dtype = jnp.int8)
    # last term is the tax deduction of the interest portion of mortgage payment
    # NOTE(review): tau_L/tau_R are used here as the deduction rates -- confirm
    payment = ((t-ab) > 0)*((t-ab) <= mortgageLength)*(((t<=T_R)*tau_L + (t>T_R)*tau_R)*Ms[t-ab]*rh - m)
    # this is the fire sell term, as long as we could afford the payment, do not sell
    sell = (income + x[0] + payment > 0)*jnp.zeros(nA) + (income + x[0] + payment <= 0)*jnp.ones(nA)
    budget1 = income + x[0] + (1-sell)*payment + sell*(H*pt - Ms[t-ab] - c_s)
    # housing services: keep the owned house (scaled by kappa) or rent after selling
    h = H*(1+kappa)*(1-sell) + sell*jnp.clip(budget1*As[:,0]*(1-alpha)/pr, a_max = Rl)
    c = budget1*As[:,0]*(1-sell) + sell*(budget1*As[:,0] - h*pr)
    budget2 = budget1*(1-As[:,0])
    k = budget2*As[:,1]
    # one-off participation cost c_k for first-time stock investors (z == 0)
    k = k - (1-x[5])*(k>0)*c_k
    k = k*(1-Kc)
    b = budget2*(1-As[:,1])
    owner_action = jnp.column_stack((c,b,k,h,sell))
    # renter
    buy = As[:,2]*(t < ageLimit)
    budget1 = income + x[0] - buy*(H*pt*0.2 + c_h)
    h = jnp.clip(budget1*As[:,0]*(1-alpha)/pr, a_max = Rl)*(1-buy) + buy*jnp.ones(nA)*H*(1+kappa)
    c = (budget1*As[:,0] - h*pr)*(1-buy) + buy*budget1*As[:,0]
    budget2 = budget1*(1-As[:,0])
    k = budget2*As[:,1]
    k = k - (1-x[5])*(k>0)*c_k
    k = k*(1-Kc)
    b = budget2*(1-As[:,1])
    renter_action = jnp.column_stack((c,b,k,h,buy))
    actions = x[4]*owner_action + (1-x[4])*renter_action
    return actions
@partial(jit, static_argnums=(0,))
def transition(t,a,x):
    '''
    Enumerate every successor state for each action row, together with its
    transition probability.  For each of the nA actions there are nS*nE
    successors (aggregate state x employment state), so the result has
    nA*nS*nE rows.

    Input:
        x = [w,ab,s,e,o,z]
        x = [0,1, 2,3,4,5]
        a = [c,b,k,h,action]
        a = [0,1,2,3,4]
    Output:
        w_next
        ab_next
        s_next
        e_next
        o_next
        z_next
        prob_next
    '''
    # local nA (rows of `a`) deliberately shadows the module-level nA
    nA = a.shape[0]
    s = jnp.array(x[2], dtype = jnp.int8)
    e = jnp.array(x[3], dtype = jnp.int8)
    # actions taken
    b = a[:,1]
    k = a[:,2]
    action = a[:,4]
    # next wealth: bond payoff plus stock payoff under each of the nS returns,
    # repeated over the nE employment outcomes
    w_next = ((1+r_b[s])*b + jnp.outer(k,(1+r_k)).T).T.flatten().repeat(nE)
    # renters who buy record the purchase age t; owners keep their ab
    ab_next = (1-x[4])*(t*(action == 1)).repeat(nS*nE) + x[4]*(x[1]*jnp.ones(w_next.size))
    s_next = jnp.tile(jnp.arange(nS),nA).repeat(nE)
    e_next = jnp.column_stack((e.repeat(nA*nS),(1-e).repeat(nA*nS))).flatten()
    # z flips to 1 permanently once the agent has ever invested in stock
    z_next = x[5]*jnp.ones(w_next.size) + ((1-x[5]) * (k > 0)).repeat(nS*nE)
    # job status changing probability and econ state transition probability
    pe = Pe[s, e]
    ps = jnp.tile(Ps[s], nA)
    prob_next = jnp.column_stack(((1-pe)*ps,pe*ps)).flatten()
    # owner
    o_next_own = (x[4] - action).repeat(nS*nE)
    # renter
    o_next_rent = action.repeat(nS*nE)
    o_next = x[4] * o_next_own + (1-x[4]) * o_next_rent
    return jnp.column_stack((w_next,ab_next,s_next,e_next,o_next,z_next,prob_next))
# used to calculate dot product
@jit
def dotProduct(p_next, uBTB):
    '''Probability-weighted sum of uBTB over each group of nS*nE successors.'''
    groupSize = nS*nE
    weighted = p_next*uBTB
    return weighted.reshape((p_next.shape[0]//groupSize, groupSize)).sum(axis = 1)
# define approximation of fit
@jit
def fit(v, xpp):
    '''
    Evaluate the next-period value function at each successor state row of
    xpp by selecting the discrete indices (columns 1..5) into the value grid
    v and linearly interpolating along the wealth axis (column 0) over the
    wealth grid ws.  NaNs from interpolation map to -inf.
    '''
    value = vmap(partial(jnp.interp,xp = ws))(x = xpp[:,0], fp = v[:,jnp.array(xpp[:,1], dtype = int),
                                                                    jnp.array(xpp[:,2], dtype = int),
                                                                    jnp.array(xpp[:,3], dtype = int),
                                                                    jnp.array(xpp[:,4], dtype = int),
                                                                    jnp.array(xpp[:,5], dtype = int)].T)
    return jnp.nan_to_num(x = value, nan = -jnp.inf)
@partial(jit, static_argnums=(0,))
def V(t,V_next,x):
    '''
    Bellman value at age t for state x given next-period value grid V_next:
    maximum over all feasible actions of flow reward plus the discounted,
    survival-weighted continuation/bequest value.

    x = [w,ab,s,e,o,z]
    x = [0,1, 2,3,4,5]
    xp:
        w_next    0
        ab_next   1
        s_next    2
        e_next    3
        o_next    4
        z_next    5
        prob_next 6
    '''
    actions = feasibleActions(t,x)
    xp = transition(t,actions,x)
    # bequeath utility, wealth level, the retirement account, heir sell the house at a cost of 25k
    TB = xp[:,0] + n_balance[t] + xp[:,4]*(H*pt-Ms[jnp.array(t-xp[:,1], dtype = jnp.int8)]*(1+rh) - 25)
    bequeathU = uB(TB)
    # in the terminal period only the bequest motive matters (V_next is unused)
    if t == T_max-1:
        Q = R(actions) + beta * dotProduct(xp[:,6], bequeathU)
    else:
        Q = R(actions) + beta * dotProduct(xp[:,6], Pa[t]*fit(V_next, xp) + (1-Pa[t])*bequeathU)
    # rule out states whose purchase age ab is not in the past (ab >= t)
    Q = Q + (-jnp.inf)*(x[1] >= t)
    v = Q.max()
    return v
# @partial(jit, static_argnums=(0,))
def V_solve(t,V_next,x):
    '''
    Same Bellman evaluation as V(), but additionally returns the maximizing
    action row (used when recovering policies rather than just values).

    x = [w,ab,s,e,o,z]
    x = [0,1, 2,3,4,5]
    xp:
        w_next    0
        ab_next   1
        s_next    2
        e_next    3
        o_next    4
        z_next    5
        prob_next 6
    '''
    actions = feasibleActions(t,x)
    xp = transition(t,actions,x)
    # bequeath utility, wealth level, the retirement account, heir sell the house at a cost of 25k
    TB = xp[:,0] + n_balance[t] + xp[:,4]*(H*pt-Ms[jnp.array(t-xp[:,1], dtype = jnp.int8)]*(1+rh) - 25)
    bequeathU = uB(TB)
    if t == T_max-1:
        Q = R(actions) + beta * dotProduct(xp[:,6], bequeathU)
    else:
        Q = R(actions) + beta * dotProduct(xp[:,6], Pa[t]*fit(V_next, xp) + (1-Pa[t])*bequeathU)
    Q = Q + (-jnp.inf)*(x[1] >= t)
    v = Q.max()
    # optimal action row [c,b,k,h,action] attaining the maximum
    cbkha = actions[Q.argmax()]
    return v, cbkha
###################################solving the model##################################################
# Backward induction over ages T_max-1 .. T_min; each step evaluates V for
# every discretized state in parallel via vmap and stores the slice in Vgrid.
fileName = agentType + "_" + str(beta_r) + "_" + str(gamma)
if os.path.exists(fileName + ".npy"):
    print("Model Solved! ")
else:
    for t in tqdm(range(T_max-1,T_min-1, -1)):
        if t == T_max-1:
            # V ignores V_next in the terminal period, so passing the (zero)
            # slice at t is harmless
            v = vmap(partial(V,t,Vgrid[:,:,:,:,:,:,t]))(Xs)
        else:
            v = vmap(partial(V,t,Vgrid[:,:,:,:,:,:,t+1]))(Xs)
        Vgrid[:,:,:,:,:,:,t] = v.reshape(dim)
    np.save(fileName,Vgrid)
830b3207dce050cf0b9ed9117c77c880e5e1a023 | 627 | py | Python | src/manage.py | leksuss/glassdb | 247777f5023df9ff92971ddf6315f0257610ece8 | [
"MIT"
] | null | null | null | src/manage.py | leksuss/glassdb | 247777f5023df9ff92971ddf6315f0257610ece8 | [
"MIT"
] | null | null | null | src/manage.py | leksuss/glassdb | 247777f5023df9ff92971ddf6315f0257610ece8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks via Django's command-line machinery."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djanger.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
52446f5104613ca175484baf5d2ad224316a509d | 13,995 | py | Python | stackexchange2json.py | nad2000/Stackexchange-Parsing | e9e8687e92b9ee99e65986f657d04eceef56c098 | [
"Apache-2.0"
] | null | null | null | stackexchange2json.py | nad2000/Stackexchange-Parsing | e9e8687e92b9ee99e65986f657d04eceef56c098 | [
"Apache-2.0"
] | null | null | null | stackexchange2json.py | nad2000/Stackexchange-Parsing | e9e8687e92b9ee99e65986f657d04eceef56c098 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -------------------------------------------------------------------------
# Name: stackexchange.py
#
# Author: Radomirs Cirskis
#
# Created: 2016-07-06
# Licence: WTFPL
# -------------------------------------------------------------------------
## NB! on MS Windows set UTF-8 for the console (cmd.exe): chcp 65001
import requests
from datetime import datetime, date, timezone
import os
import json
import argparse
import xlrd
from collections import OrderedDict
import tinys3
from multiprocessing import Pool, TimeoutError
import time
from fake_useragent import UserAgent
import config
def to_epoch(timestamp=None):
    """
    Converts datetime or date into Unix Epoch (seconds, UTC).

    Accepts None (returned unchanged), datetime, date, or anything int()
    can convert (int, float, numeric string).  Naive datetimes are treated
    as UTC.
    """
    if timestamp is None:
        return None
    # isinstance instead of `type(x) is T` so datetime/date subclasses work;
    # datetime must be checked first because datetime is a subclass of date.
    if isinstance(timestamp, datetime):
        return int(timestamp.replace(tzinfo=timezone.utc).timestamp())
    if isinstance(timestamp, date):
        return int(datetime.combine(
            timestamp,
            datetime.min.time())
            .replace(tzinfo=timezone.utc)
            .timestamp())
    return int(timestamp)
def lazy_property(fn):
    """Decorator that makes a property lazy-evaluated.

    The first access computes fn(self) and caches the result on the
    instance under '_lazy_<name>'; later accesses return the cached value.
    """
    cache_name = '_lazy_' + fn.__name__

    @property
    def wrapper(self):
        if not hasattr(self, cache_name):
            setattr(self, cache_name, fn(self))
        return getattr(self, cache_name)
    return wrapper
class S3Bucket(object):
    """Thin wrapper around a tinys3 connection to a single S3 bucket."""

    def __init__(self, name=None):
        # fall back to the configured default bucket when no name is given
        self.name = name if name else config.S3_BUCKET

    @lazy_property
    def conn(self):
        """
        Establishes connection to S3 bucket
        """
        return tinys3.Connection(
            config.S3_ACCESS_KEY,
            config.S3_SECRET_KEY,
            default_bucket=self.name)

    def upload(self, file_name):
        """Upload the local file, keyed in the bucket by its base name."""
        _, base_name = os.path.split(file_name)
        with open(file_name, 'rb') as fh:
            self.conn.upload(base_name, fh)
class Scraper(object):
    """
    Encapsulates Stackexchange scraping: site catalogue, question download,
    JSON conversion and (optionally parallel, optionally S3-backed) export.
    """

    def __init__(self, s3=True, workers=config.WORKERS, verbose=False):
        self.s3 = s3
        self.workers = workers
        self.verbose = verbose
        _ = self.sites  ## pre-cache the 'sites'

    @lazy_property
    def output_dir(self):
        """
        Determines the output directory from the configuration
        and creates it if it doesn't exist yet.
        """
        output_dir = os.path.join(config.OUTPUT_DIR)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        return output_dir

    @lazy_property
    def ua(self):
        """Shared fake-useragent generator (created once per scraper)."""
        return UserAgent()

    @property
    def user_agent(self):
        """A fresh random User-Agent string for each request."""
        return self.ua.random

    @lazy_property
    def sites(self):
        """
        Returns a dictionary of all Stackexchange sites keyed by their API
        name, loading from the cached sites.json when present and querying
        the API (with retries) otherwise.
        """
        sites_json_filename = os.path.join(self.output_dir, "sites.json")
        if os.path.exists(sites_json_filename):
            with open(sites_json_filename, "r") as sf:
                sites = json.load(sf)
        else:
            url = (config.API_BASE_URL + "sites?pagesize=10000&filter="
                   "!SmNnbu6IrvLP5nC(hk")
            for delay in range(3, 28, 5):  # 5 attempts MAX with increasing delay
                try:
                    resp = requests.get(
                        url,
                        headers={'User-Agent': self.user_agent}).json()
                except requests.exceptions.ConnectionError as ex:
                    print("!!!", ex)
                    time.sleep(delay)
                    continue
                if "error_id" in resp:
                    if resp["error_id"] != 502:
                        # BUG FIX: the original printed "... site '%s'" % site,
                        # but no 'site' variable exists in this scope and the
                        # branch raised NameError on any non-502 API error.
                        print("!!! Error querying the site list:")
                        print(json.dumps(resp, indent=4))
                    time.sleep(delay)  # wait for a while
                else:
                    break  # success
            else:
                return {}
            # NOTE(review): assumes a successful response carries "items";
            # a missing key here would raise TypeError -- confirm upstream
            sites = {item["api_site_parameter"]: item for item in resp.get("items")}
            with open(sites_json_filename, "w") as sf:
                json.dump(sites, sf)
        return sites

    @lazy_property
    def bucket(self):
        """Lazily created S3 bucket wrapper used for uploads."""
        return S3Bucket()

    def get_question(self, question_id):
        """
        Retrieves a question with all answers and comments.

        NOTE(review): the site is hard-coded to stackoverflow here -- confirm
        whether this helper is meant to be site-specific.
        """
        url = (
            config.API_BASE_URL +
            "%s?order=desc&sort=activity&site=stackoverflow&filter="
            "!)Ehv2Yl*OQfQ*ji0eSXZuEg.YqNfPFiEVg2emRci8aiNY.Xc-"
        ) % question_id
        return requests.get(url).json().get("items")

    def questions(self, *, site="meta", fromdate=None, todate=None):
        """
        Generator for retrieving the questions starting from `fromdate` till `todate`
        `fromdate` and `todate` are either datetime, date, or int (Unix Epoch)
        The iterator will continue to retrieve the items until the output is
        empty or the flag "has_more" is set FALSE, eg:
          {
            items: [],
            has_more: true,
            quota_max: 300,
            quota_remaining: 295
          }
        """
        page = 1
        while True:  ## executes until reached the end:
            if self.verbose:
                print("*** Processing site %r, page %d." % (site, page))
            url = (config.API_BASE_URL + "questions?filter="
                   "!)Ehv2Yl*OhhLOkeHr5)YcUAgEK*(hc7aypu_0Y_ehVcszKs.-"
                   "&order=desc&sort=creation"
                   "&site=%s&page=%d" % (site, page))
            # BUG FIX: the original appended "fromdate=.../todate=..." without
            # a '&' separator, fusing them into the preceding 'page' parameter
            # and producing a malformed query string.
            if fromdate:
                url += "&fromdate=%i" % to_epoch(fromdate)
            if todate:
                url += "&todate=%i" % to_epoch(todate)
            for delay in range(3, 53, 5):  # 10 attempts MAX with increasing delay
                try:
                    resp = requests.get(
                        url,
                        headers={'User-Agent': self.user_agent}).json()
                except requests.exceptions.ConnectionError as ex:
                    print("!!!", ex)
                    time.sleep(delay)
                    continue
                if "error_id" in resp:
                    if resp["error_id"] != 502:
                        print("!!! Error querying the site '%s':" % site)
                        print(json.dumps(resp, indent=4))
                    time.sleep(delay)  # wait for a while
                else:
                    break  # success
            else:
                print("!!! Failed to retrieve the questions form the site '%s'" % site)
                return
            items = resp.get("items")
            if items:
                for item in items:
                    yield item
            else:
                if page == 1:
                    print("!!! No items found for the site '%s'" % site)
                break  ## reached 'END'
            if not resp.get("has_more"):  ## the last page reached
                break
            page += 1

    def site_url(self, site):
        """URL of the site given its API name, or None if unknown."""
        s = self.sites.get(site)
        return s.get("site_url") if s else None

    def site_name(self, site):
        """Display name of the site given its API name, or None if unknown."""
        s = self.sites.get(site)
        return s.get("name") if s else None

    def find_site_by_url(self, url):
        """Return the site record whose site_url prefixes `url`; raise if none."""
        for site in self.sites.values():
            if url.startswith(site["site_url"]):
                return site
        else:
            raise Exception("Site not found for %r" % url)

    def to_output_json(self, item, site=None):
        """
        Formats post item accoding to the spec:
        {
            "abstract": <question asked>,
            "external_id": stackexchange_<section name with hyphens for spaces>_<question with hypens replacing the spaces>,
            "date": "<date the question was asked in UTC ISO format, YYYY-MM-DDTHH:MM+00:00; if the hours and minutes are not available, use 00:00 for the time>",
            "title": <question asked>
            "url": "<url linking back to the page>",
            "words": "<just the words, without html or javascript code; the question and the answers>",
            "meta": {
                "stackexchange": {}
            }
        }
        """
        if site:
            site_name = self.sites[site]["name"]
        else:
            site_name = self.find_site_by_url(item["link"])["name"]
        # collect words: question body plus every answer body (markdown)
        words = item["body_markdown"]
        if item["is_answered"] and "answers" in item:
            words += ' ' + ' '.join(a["body_markdown"]
                                    for a in item["answers"])
        external_id = ("stackexchange_%s_%s" % (
            site_name, item["title"])).replace(' ', '-')
        creation_date = datetime.fromtimestamp(
            item["creation_date"]).isoformat()
        return OrderedDict([
            ("external_id", external_id),
            ("abstract", item["title"]),
            ("date", creation_date),
            ("title", item["title"]),
            ("url", item["link"]),
            ("words", words),
            ("meta", {
                "stackexchange": {"forum": site_name}
            })
        ])

    def process_site(self, *, site="meta"):
        """
        Collects data for a single Stackexchange site and
        stores extracted JSON on $OUTPUT_DIR/`site API name`,
        optionally uploading each file to S3.
        """
        output_dir = os.path.join(self.output_dir, site)
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        for q in self.questions(site=site):
            item = self.to_output_json(q)
            file_name = os.path.join(output_dir,
                                     "stackexchange_%s_%d.json" % (site, q["question_id"]))
            with open(file_name, "w") as of:
                json.dump(item, of, indent=4)
            if self.s3:
                self.bucket.upload(file_name)

    def process_sites(self, *, sites):
        """
        Process sites in parallel (or serially when workers <= 1).
        """
        if self.workers <= 1:  # single worker
            for site in sites:
                site_url = self.site_url(site)
                site_name = self.site_name(site)
                self.process_site(site=site)
                print("*** %s (%s) processed" % (site_name, site_url))
        else:
            with Pool(processes=self.workers) as pool:
                # parameters are forwarded to the module-level process_site
                params = dict(s3=self.s3)
                site_data = [(s, params) for s in sites]
                for res in pool.starmap(process_site, site_data):
                    site = res.get("site")
                    params = res.get("params")
                    site_url = self.site_url(site)
                    site_name = self.site_name(site)
                    print("*** %s (%s) processed" % (site_name, site_url))

    def get_sites(self, *, file_name="stackexchange_forums.xlsx"):
        """
        Extrast site names form the excel workbook.

        NOTE(review): assumes each row of the first sheet is
        (site name, site URL) -- confirm against the workbook format.
        """
        book = xlrd.open_workbook(file_name)
        sheet = book.sheet_by_index(0)
        for rx in range(sheet.nrows):
            site_name, site_url = (c.value for c in sheet.row(rx))
            site = self.find_site_by_url(site_url)
            yield site["api_site_parameter"]

    def process_xls(self, *, file_name="stackexchange_forums.xlsx"):
        """
        Read the list of 'sites' from the Excel worksheet and
        retrieve the question/ansers.
        Expected format is (site name, site URL), eg,
           Stack Overflow | http://stackoverflow.com/
           Super User | http://superuser.com/
           Ask Ubuntu | http://askubuntu.com/
           ...
        """
        sites = self.get_sites(file_name=file_name)
        self.process_sites(sites=sites)
def process_site(site="meta", params={}):
    """
    Module level function for parallel invocation: build a Scraper and
    process one site, echoing back the arguments for the pool consumer.
    NOTE: a mutable default argument is an anti-pattern, but `params` is
    only read here (never mutated), so the shared dict is harmless; kept
    for interface stability.
    """
    s3 = params.get("s3", False)  # Parameters passed trough the pool
    scraper = Scraper(s3=s3)
    scraper.process_site(site=site)
    return dict(site=site, params=params)
def main():
    """Command-line entry point: parse arguments and drive the scraper."""
    parser = argparse.ArgumentParser(description="Stackexchange site "
                                     "parser and scraper.")
    parser.add_argument('-W', '--workers', dest='workers',
                        help='Number of worker processes (default: %d)' % config.WORKERS,
                        type=int, default=config.WORKERS)
    parser.add_argument('-V', '--verbose', action='store_true',
                        help='Provides more detailed output.')
    parser.add_argument('-l', '--list-sites', action='store_true',
                        help='List all available sites.')
    parser.add_argument('--no-s3', dest='s3', action='store_false',
                        help='Suppress file upload to S3')
    parser.set_defaults(s3=True)
    parser.add_argument('-e', '--excel', dest='excel',
                        help=('Excel spreasheet workbook file name '
                              'containing list of the sites.'))
    parser.add_argument('-s', '--site', dest='site',
                        help=('Single Stackexchange site API name, e.g., '
                              '"meta", "stacoverflow", etc. (default: "meta")'),
                        default="meta")
    args = parser.parse_args()
    # BUG FIX: the original read 'ags.verbose' (undefined name) and raised
    # NameError on every invocation; the parsed namespace is 'args'.
    scraper = Scraper(s3=args.s3, workers=args.workers, verbose=args.verbose)
    if args.list_sites:
        for name, site in scraper.sites.items():
            print((
                "*** %(api_site_parameter)s:\nName: %(name)s, Type: %(site_type)s, " "URL: %(site_url)s, State: %(site_state)s") % site)
    elif args.excel:
        scraper.process_xls(file_name=args.excel)
    else:
        scraper.process_site(site=args.site)


if __name__ == '__main__':
    main()
| 33.968447 | 162 | 0.522186 |
284b986d2faee4c10a6ad81b77a2539e9961ecc9 | 12,781 | py | Python | easy_command_sdk/model/container/workload_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | 5 | 2019-07-31T04:11:05.000Z | 2021-01-07T03:23:20.000Z | easy_command_sdk/model/container/workload_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | easy_command_sdk/model/container/workload_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: workload.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from easy_command_sdk.model.container import container_pb2 as easy__command__sdk_dot_model_dot_container_dot_container__pb2
from easy_command_sdk.model.container import volume_pb2 as easy__command__sdk_dot_model_dot_container_dot_volume__pb2
from easy_command_sdk.model.container import deployment_strategy_pb2 as easy__command__sdk_dot_model_dot_container_dot_deployment__strategy__pb2
from easy_command_sdk.model.container import local_object_reference_pb2 as easy__command__sdk_dot_model_dot_container_dot_local__object__reference__pb2
from easy_command_sdk.model.container import deployment_status_pb2 as easy__command__sdk_dot_model_dot_container_dot_deployment__status__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='workload.proto',
package='container',
syntax='proto3',
serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/container'),
serialized_pb=_b('\n\x0eworkload.proto\x12\tcontainer\x1a\x30\x65\x61sy_command_sdk/model/container/container.proto\x1a-easy_command_sdk/model/container/volume.proto\x1a:easy_command_sdk/model/container/deployment_strategy.proto\x1a=easy_command_sdk/model/container/local_object_reference.proto\x1a\x38\x65\x61sy_command_sdk/model/container/deployment_status.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xe3\x04\n\x08Workload\x12\x12\n\ninstanceId\x18\x01 \x01(\t\x12\x0c\n\x04kind\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x11\n\tnamespace\x18\x04 \x01(\t\x12\x14\n\x0cresourceName\x18\x05 \x01(\t\x12.\n\ncontainers\x18\x06 \x03(\x0b\x32\x1a.container.ContainerConfig\x12\x10\n\x08replicas\x18\x07 \x01(\x05\x12\"\n\x07volumes\x18\x08 \x03(\x0b\x32\x11.container.Volume\x12,\n\x0b\x61nnotations\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\'\n\x06labels\x18\n \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x11\n\tdnsPolicy\x18\x0b \x01(\t\x12\x15\n\rrestartPolicy\x18\x0c \x01(\t\x12\x39\n\x12\x64\x65ploymentStrategy\x18\r \x01(\x0b\x32\x1d.container.DeploymentStrategy\x12\x39\n\x10imagePullSecrets\x18\x0e \x03(\x0b\x32\x1f.container.LocalObjectReference\x12\x35\n\x10\x64\x65ploymentStatus\x18\x0f \x01(\x0b\x32\x1b.container.DeploymentStatus\x12\x14\n\x0cresourceSpec\x18\x10 \x01(\t\x12\x0f\n\x07\x63reator\x18\x11 \x01(\t\x12\x19\n\x11\x63reationTimestamp\x18\x12 \x01(\t\x12\r\n\x05state\x18\x13 \x01(\t\x12\x19\n\x11transitionMessage\x18\x14 \x01(\tBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/containerb\x06proto3')
,
dependencies=[easy__command__sdk_dot_model_dot_container_dot_container__pb2.DESCRIPTOR,easy__command__sdk_dot_model_dot_container_dot_volume__pb2.DESCRIPTOR,easy__command__sdk_dot_model_dot_container_dot_deployment__strategy__pb2.DESCRIPTOR,easy__command__sdk_dot_model_dot_container_dot_local__object__reference__pb2.DESCRIPTOR,easy__command__sdk_dot_model_dot_container_dot_deployment__status__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
_WORKLOAD = _descriptor.Descriptor(
name='Workload',
full_name='container.Workload',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instanceId', full_name='container.Workload.instanceId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='kind', full_name='container.Workload.kind', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='container.Workload.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='namespace', full_name='container.Workload.namespace', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resourceName', full_name='container.Workload.resourceName', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='containers', full_name='container.Workload.containers', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='replicas', full_name='container.Workload.replicas', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='volumes', full_name='container.Workload.volumes', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotations', full_name='container.Workload.annotations', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='container.Workload.labels', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dnsPolicy', full_name='container.Workload.dnsPolicy', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='restartPolicy', full_name='container.Workload.restartPolicy', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deploymentStrategy', full_name='container.Workload.deploymentStrategy', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='imagePullSecrets', full_name='container.Workload.imagePullSecrets', index=13,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deploymentStatus', full_name='container.Workload.deploymentStatus', index=14,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resourceSpec', full_name='container.Workload.resourceSpec', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creator', full_name='container.Workload.creator', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creationTimestamp', full_name='container.Workload.creationTimestamp', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='state', full_name='container.Workload.state', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='transitionMessage', full_name='container.Workload.transitionMessage', index=19,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=338,
serialized_end=949,
)
_WORKLOAD.fields_by_name['containers'].message_type = easy__command__sdk_dot_model_dot_container_dot_container__pb2._CONTAINERCONFIG
_WORKLOAD.fields_by_name['volumes'].message_type = easy__command__sdk_dot_model_dot_container_dot_volume__pb2._VOLUME
_WORKLOAD.fields_by_name['annotations'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_WORKLOAD.fields_by_name['labels'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_WORKLOAD.fields_by_name['deploymentStrategy'].message_type = easy__command__sdk_dot_model_dot_container_dot_deployment__strategy__pb2._DEPLOYMENTSTRATEGY
_WORKLOAD.fields_by_name['imagePullSecrets'].message_type = easy__command__sdk_dot_model_dot_container_dot_local__object__reference__pb2._LOCALOBJECTREFERENCE
_WORKLOAD.fields_by_name['deploymentStatus'].message_type = easy__command__sdk_dot_model_dot_container_dot_deployment__status__pb2._DEPLOYMENTSTATUS
DESCRIPTOR.message_types_by_name['Workload'] = _WORKLOAD
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Workload = _reflection.GeneratedProtocolMessageType('Workload', (_message.Message,), {
'DESCRIPTOR' : _WORKLOAD,
'__module__' : 'workload_pb2'
# @@protoc_insertion_point(class_scope:container.Workload)
})
_sym_db.RegisterMessage(Workload)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 58.360731 | 1,562 | 0.773179 |
a21f64b01fd96e84aa0b203b59d041d6383ab4cd | 4,793 | py | Python | fiases/room.py | u4097/elasticsearch-fias | d03e16492af2f39a7cc59723aa2af8d04998ecfd | [
"MIT"
] | 3 | 2020-02-14T06:20:14.000Z | 2022-01-10T12:40:13.000Z | fiases/room.py | u4097/elasticsearch-fias | d03e16492af2f39a7cc59723aa2af8d04998ecfd | [
"MIT"
] | null | null | null | fiases/room.py | u4097/elasticsearch-fias | d03e16492af2f39a7cc59723aa2af8d04998ecfd | [
"MIT"
] | 1 | 2020-02-14T06:28:29.000Z | 2020-02-14T06:28:29.000Z | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from tqdm import tqdm
from lxml import etree
from pathlib import Path
from fiases.fias_data import ES
from xml.dom import pulldom
from xml.dom.pulldom import parse
from elasticsearch.helpers import parallel_bulk
from elasticsearch.client import IndicesClient
# Local modules:
import fiases.fias_data
from fiases.fias_info import getUpdateVersion
from fiases.fias_download import downloadFull, unRarFull
def import_room(room):
    """Full (re)import of FIAS room records into Elasticsearch.

    Downloads the full FIAS archive, unpacks the rooms XML, recreates the
    ``room.INDEX`` index with an explicit keyword/date mapping, registers the
    ingest pipeline and bulk-loads every room element from the extracted file.

    Fix over the original: the yielded bulk-action dict contained the
    ``"update_date"`` key twice (the second silently overwrote the first);
    the duplicate and an unused ``counter`` local were removed.

    Args:
        room: fiases.fias_data.Room descriptor (index name, XML tag, pipeline
            name, source file name and expected record count).
    """
    fiases.fias_data.createTmpDir()
    # 1. resolve the current FIAS version
    getUpdateVersion()
    # 2. download the full archive
    downloadFull()
    # 3. unpack the rooms XML
    unRarFull(room)
    # 4. mapping: drop any stale index before recreating it
    if (ES.indices.exists(room.INDEX)):
        ES.indices.delete(index=room.INDEX)
    SHARDS_NUMBER = "1"
    # Bulk-load friendly settings: single shard, no replicas, refresh disabled
    # for the duration of the import (re-enabled implicitly by refresh() below).
    SETTINGS = {
        "index": {
            "number_of_shards": SHARDS_NUMBER,
            "number_of_replicas": "0",
            "refresh_interval": "-1",
            "requests": {
                "cache": {
                    "enable": "true"
                }
            },
            "blocks": {
                "read_only_allow_delete": "false"
            }
        }
    }
    # Static mapping; "dynamic": False below means any field not listed here
    # (e.g. house_guid, region_code) is stored but not indexed.
    PROPERTIES = {
        "live_status": {
            "type": "keyword",
        },
        "room_guid": {
            "type": "keyword",
        },
        "flat_num": {
            "type": "keyword",
        },
        "room_num": {
            "type": "keyword",
        },
        "room_type": {
            "type": "keyword",
        },
        "flat_type": {
            "type": "keyword",
        },
        "postal_code": {
            "type": "keyword"
        },
        "counter": {
            "type": "keyword"
        },
        "end_date": {
            "type": "date"
        },
        "start_date": {
            "type": "date"
        },
        "bazis_finish_date": {
            "type": "date"
        },
        "bazis_create_date": {
            "type": "date"
        },
        "bazis_update_date": {
            "type": "date"
        },
        "update_date": {
            "type": "date"
        },
        "cad_num": {
            "type": "keyword"
        }
    }
    ES.indices.create(index=room.INDEX,
                      body={
                          'mappings': {
                              "dynamic": False,
                              "properties": PROPERTIES
                          },
                          "settings": SETTINGS
                      })
    # 6. ingest pipeline (preprocessor)
    room.createPreprocessor()
    # 7. import: stream-parse the XML so the whole file is never in memory
    doc = parse(fiases.fias_data.WORK_DIR + room.xml_file)

    def importFull():
        # Generator of bulk actions, one per room start element.
        for event, node in doc:
            if event == pulldom.START_ELEMENT \
                    and node.tagName == room.TAG:
                yield {
                    "_index": room.INDEX,
                    "_type": "_doc",
                    "_op_type": fiases.fias_data.INDEX_OPER,
                    'pipeline': room.PIPELINE,
                    "_id": node.getAttribute("ROOMID"),
                    "room_guid": node.getAttribute("ROOMGUID"),
                    "house_guid": node.getAttribute("HOUSEGUID"),
                    "flat_num": node.getAttribute("FLATNUMBER"),
                    "room_num": node.getAttribute("ROOMNUMBER"),
                    "room_type": node.getAttribute("ROOMTYPE"),
                    "flat_type": node.getAttribute("FLATTYPE"),
                    "live_status": node.getAttribute("LIVESTATUS"),
                    "region_code": node.getAttribute("REGIONCODE"),
                    "postal_code": node.getAttribute("POSTALCODE"),
                    "start_date": node.getAttribute("STARTDATE"),
                    "end_date": node.getAttribute("ENDDATE"),
                    "update_date": node.getAttribute("UPDATEDATE"),
                    "norm_doc": node.getAttribute("NORMDOC"),
                    "cad_num": node.getAttribute("CADNUM"),
                    "bazis_create_date": fiases.fias_data.CREATE_DATE_ZERO,
                    "bazis_update_date": fiases.fias_data.UPDATE_DATE_ZERO,
                    "bazis_finish_date": node.getAttribute("ENDDATE")
                }

    # Bulk-index with a progress bar; failed actions are printed, not fatal.
    for ok, info in tqdm(parallel_bulk(ES,
                                       importFull(),
                                       raise_on_error=False,
                                       raise_on_exception=False),
                         unit=' адрес',
                         desc=' загружено',
                         total=room.COUNT):
        if (not ok):
            print(ok, info)
    # Make the freshly indexed documents searchable and compact the segments.
    IndicesClient(ES).refresh()
    IndicesClient(ES).flush()
    IndicesClient(ES).forcemerge()
# Script entry point: the import runs on module load (no __main__ guard),
# so importing this module triggers a full download + reindex.
room = fiases.fias_data.Room()
import_room(room=room)
| 29.404908 | 75 | 0.473816 |
a5c0edd25b90df4f9730e0cd2b7abc8b5372295e | 5,454 | py | Python | tests/test_cell_metadata.py | broadinstitute/scp-ingest-service | 1a63a27061b53a5f7909c72d59808f9af71456a6 | [
"BSD-3-Clause"
] | null | null | null | tests/test_cell_metadata.py | broadinstitute/scp-ingest-service | 1a63a27061b53a5f7909c72d59808f9af71456a6 | [
"BSD-3-Clause"
] | null | null | null | tests/test_cell_metadata.py | broadinstitute/scp-ingest-service | 1a63a27061b53a5f7909c72d59808f9af71456a6 | [
"BSD-3-Clause"
] | null | null | null | import sys
import unittest
import json
from mock_data.annotation.metadata.convention.valid_array_v2_1_2 import (
valid_array_v2_1_2_models,
)
sys.path.append("../ingest")
from cell_metadata import CellMetadata
from validation.validate_metadata import collect_jsonschema_errors
from ingest_pipeline import IngestPipeline
from ingest_files import IngestFiles
class TestCellMetadata(unittest.TestCase):
    """Tests for ingest.cell_metadata.CellMetadata parsing, type inference
    and MongoDB-model transformation.

    The two 24-hex-char positional arguments passed to CellMetadata below
    are presumably (file_id, study_id) ObjectId strings — TODO confirm
    against the CellMetadata constructor.
    """

    def test_validate_header_for_coordinate_values_false(self):
        """Ensures validate_header_for_coordinate_values returns false when
        coordinate value is in metadata file
        Note: cluster has similar set of tests
        """
        cm = CellMetadata(
            "../tests/data/metadata_has_coordinate_header.txt",
            "5d276a50421aa9117c982845",
            "5dd5ae25421aa910a723a337",
            study_accession="SCP2",
            tracer=None,
        )
        self.assertFalse(cm.validate_header_for_coordinate_values())

    def test_validate_header_for_coordinate_values_true(self):
        """Ensures validate_header_for_coordinate_values returns true when
        coordinate value is not in metadata file
        """
        cm = CellMetadata(
            "../tests/data/metadata_example.txt",
            "5d276a50421aa9117c982845",
            "5dd5ae25421aa910a723a337",
            study_accession="SCP2",
            tracer=None,
        )
        cm.preprocess()
        self.assertTrue(cm.validate_header_for_coordinate_values())

    def test_metadata_type_inference(self):
        """Confirm consistency of type inference behavior
        in instantiated data frame
        """
        cm = CellMetadata(
            "../tests/data/annotation/metadata/metadata_NA.txt",
            "addedfeed000000000000000",
            "dec0dedfeed1111111111111",
            study_accession="SCPtest",
        )
        cm.preprocess()
        # Group annotations: NaN from empty cells must be coerced to string.
        # integers, empty cell and string as inputs for numeric annotation
        assert isinstance(
            cm.file["NA_i_n_s__grp"]["group"][3], str
        ), "empty cell -> NaN, expect coercion to string"
        # integers and empty cell as inputs for numeric annotation
        assert isinstance(
            cm.file["NA_i_n_grp"]["group"][3], str
        ), "empty cell -> NaN, expect coercion to string"
        # floats, empty cell and string as inputs for numeric annotation
        assert isinstance(
            cm.file["NA_f_n_s__grp"]["group"][3], str
        ), "empty cell -> NaN, expect coercion to string"
        # floats and empty cell as inputs for numeric annotation
        assert isinstance(
            cm.file["NA_f_n_grp"]["group"][3], str
        ), "empty cell -> NaN, expect coercion to string"
        # Numeric annotations: NaN must stay a float, never be stringified.
        # integers, empty cell and string as inputs for group annotation
        assert isinstance(
            cm.file["NA_i_n_s__num"]["numeric"][3], float
        ), "empty cell -> NaN that remains float (not coerced)"
        # floats, empty cell and string as inputs for group annotation
        assert isinstance(
            cm.file["NA_f_n_s__num"]["numeric"][3], float
        ), "empty cell -> NaN that remains float (not coerced)"

    def test_transform(self):
        # Numeric columns that have array convention data are stored as a group in Mongo
        cm = CellMetadata(
            "../tests/data/annotation/metadata/convention/valid_array_v2.1.2.txt",
            "5ea08bb17b2f150f29f4d952",
            "600f42bdb067340e777b1385",
            study_accession="SCP123",
        )
        cm.preprocess(is_metadata_convention=True)
        # Validate against the metadata-convention JSON schema before transform.
        convention_file_object = IngestFiles(
            CellMetadata.JSON_CONVENTION, ["application/json"]
        )
        json_file = convention_file_object.open_file(CellMetadata.JSON_CONVENTION)
        convention = json.load(json_file)
        collect_jsonschema_errors(cm, convention)
        # Every transformed model must match the precomputed fixture models.
        for metadata_model in cm.transform():
            model = metadata_model.model
            model_name = model["name"]
            expect_model = valid_array_v2_1_2_models["cell_metadata_models"][model_name]
            self.assertEqual(model, expect_model)

    def test_skip_large_group_transform(self):
        # metadata "barcodekey" - 250 unique values, should not transform
        # metadata "scale" - 200 unique values , should transform
        cm = CellMetadata(
            "../tests/data/annotation/metadata/convention/large_group_metadata_to_skip.txt",
            "612e90364e68d4b7e3ece4d0",
            "612e998b4e68d4b7e3ece504",
            study_accession="SCP3",
        )
        cm.preprocess(is_metadata_convention=True)
        convention_file_object = IngestFiles(
            CellMetadata.JSON_CONVENTION, ["application/json"]
        )
        json_file = convention_file_object.open_file(CellMetadata.JSON_CONVENTION)
        convention = json.load(json_file)
        collect_jsonschema_errors(cm, convention)
        # Collect the names of models whose "values" list came back empty.
        values_array_empty = []
        for metadata_model in cm.transform():
            if not metadata_model.model["values"]:
                values_array_empty.append(metadata_model.model["name"])
        barcodekey = True if "barcodekey" in values_array_empty else False
        scale = True if "scale" in values_array_empty else False
        self.assertTrue(
            barcodekey, "metadata with too many unique values should not store values"
        )
        self.assertFalse(scale, "metadata with exactly 200 values should be stored")
| 40.4 | 92 | 0.657682 |
86b560f8af4aa77b581d1af06d7fe48f629ee09a | 1,472 | py | Python | tests/embedding/metric/test_distortion_metrics.py | microsoft/topologic | d3a2155a42469ccb16de178f47bec81b0476fdc8 | [
"MIT"
] | 24 | 2020-02-10T23:51:06.000Z | 2021-11-17T02:34:47.000Z | tests/embedding/metric/test_distortion_metrics.py | microsoft/topologic | d3a2155a42469ccb16de178f47bec81b0476fdc8 | [
"MIT"
] | 26 | 2020-02-11T18:37:33.000Z | 2020-11-11T00:14:41.000Z | tests/embedding/metric/test_distortion_metrics.py | microsoft/topologic | d3a2155a42469ccb16de178f47bec81b0476fdc8 | [
"MIT"
] | 6 | 2020-07-31T11:05:36.000Z | 2021-11-10T08:18:52.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import unittest
import topologic as tc
import networkx as nx
import numpy as np
class TestDistortionMetrics(unittest.TestCase):
    """Tests for topologic's mean-average-precision distortion metric."""

    def test_mean_average_precision_calculated_correctly(self):
        # Small 4-node graph; duplicate/reversed edges collapse in nx.Graph.
        edge_list = [(0, 1), (0, 2), (2, 0), (2, 1), (1, 0), (3, 2), (3, 1)]
        graph = nx.Graph()
        graph.add_edges_from(edge_list)

        container = tc.embedding.EmbeddingContainer(
            embedding=np.array([[0, 0], [0, 1], [1, 1], [0.5, 1]]),
            vertex_labels=[0, 1, 2, 3],
        )

        actual = tc.embedding.metric.distortion_metrics.mean_average_precision(
            graph,
            container
        )
        # Hand-computed expectation, checked to four decimal places.
        self.assertAlmostEqual(0.9583, actual, places=4)

    def test_mean_average_precision_graph_not_specified_error_raised(self):
        container = tc.embedding.EmbeddingContainer(embedding=[[0]], vertex_labels=[0])
        with self.assertRaises(ValueError):
            tc.embedding.metric.distortion_metrics.mean_average_precision(None, container)

    def test_mean_average_precision_embedding_not_specified_error_raised(self):
        with self.assertRaises(ValueError):
            tc.embedding.metric.distortion_metrics.mean_average_precision(nx.Graph(), None)
| 39.783784 | 106 | 0.699728 |
fdfe71973c08d2d01521f7c161c30d193b839104 | 365 | py | Python | django/testapp/tests.py | xyshal/django-site-template | 2b9af032b19dd85acbe65dbbba90a43402cf4d88 | [
"MIT"
] | null | null | null | django/testapp/tests.py | xyshal/django-site-template | 2b9af032b19dd85acbe65dbbba90a43402cf4d88 | [
"MIT"
] | null | null | null | django/testapp/tests.py | xyshal/django-site-template | 2b9af032b19dd85acbe65dbbba90a43402cf4d88 | [
"MIT"
] | null | null | null | from django.test import TestCase
from testapp.models import TestModel
class TestCase(TestCase):
def setUp(self):
TestModel.objects.create(address="123 Fake St", state="AK", zip_code=99701)
def test_f(self):
record = TestModel.objects.get(address="123 Fake St", state="AK", zip_code=99701)
self.assertEqual(record.zip_code, 99701)
| 30.416667 | 89 | 0.709589 |
0cfa70e1483ba08d37d4dfb1b04fbf1ee94b9563 | 821 | py | Python | aalh_iit_transportation_003/cleanup-description-pipes.py | johndewees/iitmigration | 4dadfbecda719d6e7d60af076a231aedec3c862f | [
"Unlicense"
] | null | null | null | aalh_iit_transportation_003/cleanup-description-pipes.py | johndewees/iitmigration | 4dadfbecda719d6e7d60af076a231aedec3c862f | [
"Unlicense"
] | null | null | null | aalh_iit_transportation_003/cleanup-description-pipes.py | johndewees/iitmigration | 4dadfbecda719d6e7d60af076a231aedec3c862f | [
"Unlicense"
] | null | null | null | from openpyxl import load_workbook
filename = 'aalh_iit_transportation_003.xlsx'
wb = load_workbook(filename)
ws = wb['Metadata Template']
minimumcol = 8
maximumcol = 8
minimumrow = 7
maximumrow = 515
iterationrow = 7
targetcol = 13
titlecol = 2
desccol = 8
for row in ws.iter_rows(min_row=minimumrow, min_col=minimumcol, max_row=maximumrow, max_col=maximumcol):
testvar = ws.cell(row=iterationrow, column=desccol).value
for cell in row:
if testvar.endswith('|'):
desc = testvar[:-1]
desc = desc.strip()
ws.cell(row=iterationrow, column=desccol).value = desc
print(iterationrow)
print('PIPE FOUND')
else:
continue
iterationrow = iterationrow + 1
wb.save("aalh_iit_transportation_003.xlsx") | 28.310345 | 105 | 0.651644 |
faaefca06b3edebf1066341dfb8d5490daed2698 | 20,045 | py | Python | django/db/backends/mysql/base.py | aptivate/django-old | 33b2a85150658fc7c2207ac2f5fd6f89739773e4 | [
"BSD-3-Clause"
] | 50 | 2015-01-13T10:01:41.000Z | 2021-10-15T01:44:43.000Z | django/db/backends/mysql/base.py | akaariai/django-old | 45b80c420d6655ec5b86bea3b3c17b4adaa61291 | [
"BSD-3-Clause"
] | 20 | 2015-04-20T12:09:12.000Z | 2022-03-12T01:25:04.000Z | django/db/backends/mysql/base.py | akaariai/django-old | 45b80c420d6655ec5b86bea3b3c17b4adaa61291 | [
"BSD-3-Clause"
] | 24 | 2015-07-22T08:08:54.000Z | 2021-12-28T06:56:09.000Z | """
MySQL database backend for Django.
Requires MySQLdb: http://sourceforge.net/projects/mysql-python
"""
import datetime
import re
import sys
import warnings
# Import MySQLdb, converting an ImportError into Django's standard
# configuration error so the user gets an actionable message.
# NOTE: Python 2 syntax throughout this file ("except X, e", long literals).
try:
    import MySQLdb as Database
except ImportError, e:
    from django.core.exceptions import ImproperlyConfigured
    raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)

# We want version (1, 2, 1, 'final', 2) or later. We can't just use
# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
# inadvertently passes the version test.
version = Database.version_info
if (version < (1,2,1) or (version[:3] == (1, 2, 1) and
        (len(version) < 5 or version[3] != 'final' or version[4] < 2))):
    from django.core.exceptions import ImproperlyConfigured
    raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__)
from MySQLdb.converters import conversions, Thing2Literal
from MySQLdb.constants import FIELD_TYPE, CLIENT
from django.db import utils
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.mysql.client import DatabaseClient
from django.db.backends.mysql.creation import DatabaseCreation
from django.db.backends.mysql.introspection import DatabaseIntrospection
from django.db.backends.mysql.validation import DatabaseValidation
from django.utils.safestring import SafeString, SafeUnicode
from django.utils import timezone
# Raise exceptions for database warnings if DEBUG is on
from django.conf import settings
if settings.DEBUG:
    warnings.filterwarnings("error", category=Database.Warning)

# Re-export the driver's exception classes under the names the Django
# backend API expects.
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError

# It's impossible to import datetime_or_None directly from MySQLdb.times
parse_datetime = conversions[FIELD_TYPE.DATETIME]
def parse_datetime_with_timezone_support(value):
    """Parse a DATETIME column value; attach UTC tzinfo when USE_TZ is on.

    MySQL stores naive datetimes; under USE_TZ Django treats them as UTC.
    """
    dt = parse_datetime(value)
    # Confirm that dt is naive before overwriting its tzinfo.
    if dt is not None and settings.USE_TZ and timezone.is_naive(dt):
        dt = dt.replace(tzinfo=timezone.utc)
    return dt
def adapt_datetime_with_timezone_support(value, conv):
    """Adapt a Python datetime to a MySQL literal, normalizing to UTC.

    Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.

    Fix: the RuntimeWarning text said "SQLite" — a copy-paste from the
    sqlite3 backend — but this is the MySQL backend; it now says "MySQL".
    """
    if settings.USE_TZ:
        if timezone.is_naive(value):
            warnings.warn(u"MySQL received a naive datetime (%s)"
                          u" while time zone support is active." % value,
                          RuntimeWarning)
            # Interpret the naive value in the configured default timezone.
            default_timezone = timezone.get_default_timezone()
            value = timezone.make_aware(value, default_timezone)
        # MySQL can't store tz-aware values: convert to UTC and strip tzinfo.
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S"), conv)
# MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like
# timedelta in terms of actual behavior as they are signed and include days --
# and Django expects time, so we still need to override that. We also need to
# add special handling for SafeUnicode and SafeString as MySQLdb's type
# checking is too tight to catch those (see Django ticket #6052).
# Finally, MySQLdb always returns naive datetime objects. However, when
# timezone support is active, Django expects timezone-aware datetime objects.
django_conversions = conversions.copy()
django_conversions.update({
    FIELD_TYPE.TIME: util.typecast_time,
    FIELD_TYPE.DECIMAL: util.typecast_decimal,
    FIELD_TYPE.NEWDECIMAL: util.typecast_decimal,
    FIELD_TYPE.DATETIME: parse_datetime_with_timezone_support,
    datetime.datetime: adapt_datetime_with_timezone_support,
})

# This should match the numerical portion of the version numbers (we can treat
# versions like 5.0.24 and 5.0.24a as the same). Based on the list of version
# at http://dev.mysql.com/doc/refman/4.1/en/news.html and
# http://dev.mysql.com/doc/refman/5.0/en/news.html .
# Deliberately not anchored: only the leading numeric triple matters.
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')

# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
# point is to raise Warnings as exceptions, this can be done with the Python
# warning module, and this is setup when the connection is created, and the
# standard util.CursorDebugWrapper can be used. Also, using sql_mode
# TRADITIONAL will automatically cause most warnings to be treated as errors.
class CursorWrapper(object):
    """
    A thin wrapper around MySQLdb's normal cursor class so that we can catch
    particular exception instances and reraise them with the right types.

    Implemented as a wrapper, rather than a subclass, so that we aren't stuck
    to the particular underlying representation returned by Connection.cursor().
    """
    # MySQL error codes that really indicate integrity violations but are
    # raised by the driver as OperationalError (1048 = column cannot be null).
    codes_for_integrityerror = (1048,)

    def __init__(self, cursor):
        self.cursor = cursor

    def execute(self, query, args=None):
        # Re-raise driver exceptions as django.db.utils.* types, preserving
        # the original traceback (Python 2 three-argument raise).
        try:
            return self.cursor.execute(query, args)
        except Database.IntegrityError, e:
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.OperationalError, e:
            # Map some error codes to IntegrityError, since they seem to be
            # misclassified and Django would prefer the more logical place.
            if e[0] in self.codes_for_integrityerror:
                raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]

    def executemany(self, query, args):
        # Same translation as execute(), for the batch entry point.
        try:
            return self.cursor.executemany(query, args)
        except Database.IntegrityError, e:
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.OperationalError, e:
            # Map some error codes to IntegrityError, since they seem to be
            # misclassified and Django would prefer the more logical place.
            if e[0] in self.codes_for_integrityerror:
                raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]

    def __getattr__(self, attr):
        # Delegate everything else to the wrapped cursor.
        if attr in self.__dict__:
            return self.__dict__[attr]
        else:
            return getattr(self.cursor, attr)

    def __iter__(self):
        return iter(self.cursor)
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags describing what this MySQL backend supports."""
    empty_fetchmany_value = ()
    update_can_self_select = False
    allows_group_by_pk = True
    related_fields_match_type = True
    allow_sliced_subqueries = False
    has_bulk_insert = True
    has_select_for_update = True
    has_select_for_update_nowait = False
    supports_forward_references = False
    supports_long_model_names = False
    supports_microsecond_precision = False
    supports_regex_backreferencing = False
    supports_date_lookup_using_string = False
    supports_timezones = False
    requires_explicit_null_ordering_when_grouping = True
    allows_primary_key_0 = False

    def __init__(self, connection):
        super(DatabaseFeatures, self).__init__(connection)
        # Lazily-populated cache for _mysql_storage_engine().
        self._storage_engine = None

    def _mysql_storage_engine(self):
        "Internal method used in Django tests. Don't rely on this from your code"
        # Detect the server's default storage engine by creating a throwaway
        # table and reading the engine reported for it; cached after first call.
        if self._storage_engine is None:
            cursor = self.connection.cursor()
            cursor.execute('CREATE TABLE INTROSPECT_TEST (X INT)')
            # This command is MySQL specific; the second column
            # will tell you the default table type of the created
            # table. Since all Django's test tables will have the same
            # table type, that's enough to evaluate the feature.
            cursor.execute("SHOW TABLE STATUS WHERE Name='INTROSPECT_TEST'")
            result = cursor.fetchone()
            cursor.execute('DROP TABLE INTROSPECT_TEST')
            self._storage_engine = result[1]
        return self._storage_engine

    def _can_introspect_foreign_keys(self):
        "Confirm support for introspected foreign keys"
        # MyISAM tables don't store foreign key constraints.
        return self._mysql_storage_engine() != 'MyISAM'
class DatabaseOperations(BaseDatabaseOperations):
    """MySQL-specific SQL generation helpers used by the ORM compiler."""
    compiler_module = "django.db.backends.mysql.compiler"

    def date_extract_sql(self, lookup_type, field_name):
        # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
        if lookup_type == 'week_day':
            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
            # Note: WEEKDAY() returns 0-6, Monday=0.
            return "DAYOFWEEK(%s)" % field_name
        else:
            return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)

    def date_trunc_sql(self, lookup_type, field_name):
        # Truncate by formatting the kept components and padding the rest
        # with their zero defaults, then casting back to DATETIME.
        fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
        format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
        format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
        try:
            i = fields.index(lookup_type) + 1
        except ValueError:
            # Unknown lookup_type: pass the column through untruncated.
            sql = field_name
        else:
            format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
            sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
        return sql

    def date_interval_sql(self, sql, connector, timedelta):
        # Render a Python timedelta as a MySQL DAY_MICROSECOND interval.
        return "(%s %s INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND)" % (sql, connector,
            timedelta.days, timedelta.seconds, timedelta.microseconds)

    def drop_foreignkey_sql(self):
        return "DROP FOREIGN KEY"

    def force_no_ordering(self):
        """
        "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
        columns. If no ordering would otherwise be applied, we don't want any
        implicit sorting going on.
        """
        return ["NULL"]

    def fulltext_search_sql(self, field_name):
        return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name

    def last_executed_query(self, cursor, sql, params):
        # With MySQLdb, cursor objects have an (undocumented) "_last_executed"
        # attribute where the exact query sent to the database is saved.
        # See MySQLdb/cursors.py in the source distribution.
        return cursor._last_executed

    def no_limit_value(self):
        # 2**64 - 1, as recommended by the MySQL documentation
        return 18446744073709551615L

    def quote_name(self, name):
        if name.startswith("`") and name.endswith("`"):
            return name # Quoting once is enough.
        return "`%s`" % name

    def random_function_sql(self):
        return 'RAND()'

    def sql_flush(self, style, tables, sequences):
        # NB: The generated SQL below is specific to MySQL
        # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
        # to clear all tables of all data
        if tables:
            # FK checks must be off while truncating, since order is arbitrary.
            sql = ['SET FOREIGN_KEY_CHECKS = 0;']
            for table in tables:
                sql.append('%s %s;' % (style.SQL_KEYWORD('TRUNCATE'), style.SQL_FIELD(self.quote_name(table))))
            sql.append('SET FOREIGN_KEY_CHECKS = 1;')

            # 'ALTER TABLE table AUTO_INCREMENT = 1;'... style SQL statements
            # to reset sequence indices
            sql.extend(["%s %s %s %s %s;" % \
                (style.SQL_KEYWORD('ALTER'),
                 style.SQL_KEYWORD('TABLE'),
                 style.SQL_TABLE(self.quote_name(sequence['table'])),
                 style.SQL_KEYWORD('AUTO_INCREMENT'),
                 style.SQL_FIELD('= 1'),
                ) for sequence in sequences])
            return sql
        else:
            return []

    def value_to_db_datetime(self, value):
        if value is None:
            return None

        # MySQL doesn't support tz-aware datetimes
        if timezone.is_aware(value):
            if settings.USE_TZ:
                value = value.astimezone(timezone.utc).replace(tzinfo=None)
            else:
                raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")

        # MySQL doesn't support microseconds
        return unicode(value.replace(microsecond=0))

    def value_to_db_time(self, value):
        if value is None:
            return None

        # MySQL doesn't support tz-aware times
        if timezone.is_aware(value):
            raise ValueError("MySQL backend does not support timezone-aware times.")

        # MySQL doesn't support microseconds
        return unicode(value.replace(microsecond=0))

    def year_lookup_bounds(self, value):
        # Again, no microseconds
        first = '%s-01-01 00:00:00'
        second = '%s-12-31 23:59:59.99'
        return [first % value, second % value]

    def max_name_length(self):
        # MySQL identifier length limit.
        return 64

    def bulk_insert_sql(self, fields, num_values):
        # One "(%s, ..., %s)" placeholder group per row being inserted.
        items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
        return "VALUES " + ", ".join([items_sql] * num_values)

    def savepoint_create_sql(self, sid):
        return "SAVEPOINT %s" % sid

    def savepoint_commit_sql(self, sid):
        return "RELEASE SAVEPOINT %s" % sid

    def savepoint_rollback_sql(self, sid):
        return "ROLLBACK TO SAVEPOINT %s" % sid
class DatabaseWrapper(BaseDatabaseWrapper):
    """Connection wrapper tying together the MySQL backend components."""
    vendor = 'mysql'
    # SQL fragments for ORM lookups; BINARY forces case-sensitive matching
    # where the lookup is case-sensitive.
    operators = {
        'exact': '= %s',
        'iexact': 'LIKE %s',
        'contains': 'LIKE BINARY %s',
        'icontains': 'LIKE %s',
        'regex': 'REGEXP BINARY %s',
        'iregex': 'REGEXP %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE BINARY %s',
        'endswith': 'LIKE BINARY %s',
        'istartswith': 'LIKE %s',
        'iendswith': 'LIKE %s',
    }

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Cached (major, minor, micro) tuple; filled by get_server_version().
        self.server_version = None

        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = DatabaseValidation(self)

    def _valid_connection(self):
        # Ping the server; drop the connection object if it has gone away.
        if self.connection is not None:
            try:
                self.connection.ping()
                return True
            except DatabaseError:
                self.connection.close()
                self.connection = None
        return False

    def _cursor(self):
        """Return a wrapped cursor, (re)connecting first if necessary."""
        new_connection = False
        if not self._valid_connection():
            new_connection = True
            kwargs = {
                'conv': django_conversions,
                'charset': 'utf8',
                'use_unicode': True,
            }
            settings_dict = self.settings_dict
            if settings_dict['USER']:
                kwargs['user'] = settings_dict['USER']
            if settings_dict['NAME']:
                kwargs['db'] = settings_dict['NAME']
            if settings_dict['PASSWORD']:
                kwargs['passwd'] = settings_dict['PASSWORD']
            # A HOST starting with '/' is a Unix socket path, not a hostname.
            if settings_dict['HOST'].startswith('/'):
                kwargs['unix_socket'] = settings_dict['HOST']
            elif settings_dict['HOST']:
                kwargs['host'] = settings_dict['HOST']
            if settings_dict['PORT']:
                kwargs['port'] = int(settings_dict['PORT'])
            # We need the number of potentially affected rows after an
            # "UPDATE", not the number of changed rows.
            kwargs['client_flag'] = CLIENT.FOUND_ROWS
            kwargs.update(settings_dict['OPTIONS'])
            self.connection = Database.connect(**kwargs)
            # Encode Django's safe-string types the same way as their bases
            # (MySQLdb's type checks wouldn't match the subclasses otherwise).
            self.connection.encoders[SafeUnicode] = self.connection.encoders[unicode]
            self.connection.encoders[SafeString] = self.connection.encoders[str]
            self.features.uses_savepoints = \
                self.get_server_version() >= (5, 0, 3)
            connection_created.send(sender=self.__class__, connection=self)
        cursor = self.connection.cursor()
        if new_connection:
            # SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column
            # on a recently-inserted row will return when the field is tested for
            # NULL. Disabling this value brings this aspect of MySQL in line with
            # SQL standards.
            cursor.execute('SET SQL_AUTO_IS_NULL = 0')
        return CursorWrapper(cursor)

    def _rollback(self):
        # MyISAM doesn't support transactions; ignore the driver's complaint.
        try:
            BaseDatabaseWrapper._rollback(self)
        except Database.NotSupportedError:
            pass

    def get_server_version(self):
        """Return the server version as an (x, y, z) integer tuple, cached."""
        if not self.server_version:
            if not self._valid_connection():
                self.cursor()
            m = server_version_re.match(self.connection.get_server_info())
            if not m:
                raise Exception('Unable to determine MySQL version from version string %r' % self.connection.get_server_info())
            self.server_version = tuple([int(x) for x in m.groups()])
        return self.server_version

    def disable_constraint_checking(self):
        """
        Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
        to indicate constraint checks need to be re-enabled.
        """
        self.cursor().execute('SET foreign_key_checks=0')
        return True

    def enable_constraint_checking(self):
        """
        Re-enable foreign key checks after they have been disabled.
        """
        self.cursor().execute('SET foreign_key_checks=1')

    def check_constraints(self, table_names=None):
        """
        Checks each table name in `table_names` for rows with invalid foreign key references. This method is
        intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to
        determine if rows with invalid references were entered while constraint checks were off.

        Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides
        detailed information about the invalid reference in the error message.

        Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS
        ALL IMMEDIATE")
        """
        cursor = self.cursor()
        if table_names is None:
            table_names = self.introspection.get_table_list(cursor)
        for table_name in table_names:
            # Tables without a single-column PK can't be checked this way.
            primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
            if not primary_key_column_name:
                continue
            key_columns = self.introspection.get_key_columns(cursor, table_name)
            for column_name, referenced_table_name, referenced_column_name in key_columns:
                # LEFT JOIN finds FK values with no matching referenced row.
                cursor.execute("""
                    SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
                    LEFT JOIN `%s` as REFERRED
                    ON (REFERRING.`%s` = REFERRED.`%s`)
                    WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
                    % (primary_key_column_name, column_name, table_name, referenced_table_name,
                    column_name, referenced_column_name, column_name, referenced_column_name))
                for bad_row in cursor.fetchall():
                    raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
                        "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
                        % (table_name, bad_row[0],
                        table_name, column_name, bad_row[1],
                        referenced_table_name, referenced_column_name))
| 43.015021 | 127 | 0.648192 |
cbe12781fd76d848319803aa579d95af7648f78e | 589 | py | Python | setup_SDN_MultipleTrial.py | anagamori/python-code | ce19ca30f177726b48b30ea1b02c0c73ba5d5d3f | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | setup_SDN_MultipleTrial.py | anagamori/python-code | ce19ca30f177726b48b30ea1b02c0c73ba5d5d3f | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | setup_SDN_MultipleTrial.py | anagamori/python-code | ce19ca30f177726b48b30ea1b02c0c73ba5d5d3f | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 12 14:25:34 2018
@author: akira
"""
from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
from Cython.Distutils import build_ext
# Extension definition carrying the math library and OpenMP compile/link
# flags needed by the parallelized Cython module.
ext_modules = [
    Extension(
        "SDN_MultipleTrial",
        ["SDN_MultipleTrial.pyx"],
        libraries=["m"],
        extra_compile_args=["-ffast-math", "-fopenmp"],
        extra_link_args=["-fopenmp"],
    )
]

setup(
    cmdclass={"build_ext": build_ext},
    # BUG FIX: cythonize the Extension objects defined above. The original
    # passed the bare .pyx path, which silently discarded libraries,
    # extra_compile_args and extra_link_args — so the module was built
    # without OpenMP or fast-math.
    ext_modules=cythonize(ext_modules),
)
7dbab738011594e934c7125ffb58a3a00b46416c | 5,244 | py | Python | tests/st/ops/gpu/test_batch_matmul.py | dongkcs/mindspore | cd7df6dbf463ff3128e9181e9d0c779cecb81320 | [
"Apache-2.0"
] | 55 | 2020-12-17T10:26:06.000Z | 2022-03-28T07:18:26.000Z | tests/st/ops/gpu/test_batch_matmul.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | 1 | 2020-12-29T06:46:38.000Z | 2020-12-29T06:46:38.000Z | tests/st/ops/gpu/test_batch_matmul.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | 14 | 2021-01-29T02:39:47.000Z | 2022-03-23T05:00:26.000Z | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.ops import operations as P
# NOTE(review): these pytest marks decorate the helper network class, not a
# test function, so pytest ignores them here. They look like they were meant
# for test_4d below -- confirm against the original test layout.
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
class BatchMatMulNet(nn.Cell):
    """Minimal MindSpore cell wrapping the ``P.BatchMatMul`` primitive."""

    def __init__(self, transpose_a=False, transpose_b=False):
        # Bake the transpose flags into the primitive at construction time.
        super(BatchMatMulNet, self).__init__()
        self.batch_matmul = P.BatchMatMul(transpose_a, transpose_b)

    def construct(self, x, y):
        # Forward pass: batched matrix product of x and y.
        return self.batch_matmul(x, y)
def test_4d():
    """BatchMatMul on 4-D float32 inputs matches the precomputed product."""
    x = Tensor(np.arange(2 * 4 * 1 * 3).reshape(2, 4, 1, 3), mstype.float32)
    y = Tensor(np.arange(2 * 4 * 3 * 4).reshape(2, 4, 3, 4), mstype.float32)
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    result = BatchMatMulNet()(x, y)
    expect = [[[[20, 23, 26, 29]],
               [[200, 212, 224, 236]],
               [[596, 617, 638, 659]],
               [[1208, 1238, 1268, 1298]]],
              [[[2036, 2075, 2114, 2153]],
               [[3080, 3128, 3176, 3224]],
               [[4340, 4397, 4454, 4511]],
               [[5816, 5882, 5948, 6014]]]]
    assert (result.asnumpy() == expect).all()
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_4d_transpose_a():
    """BatchMatMul with transpose_a=True reproduces the reference product."""
    x = Tensor(np.arange(2 * 4 * 3 * 1).reshape(2, 4, 3, 1), mstype.float32)
    y = Tensor(np.arange(2 * 4 * 3 * 4).reshape(2, 4, 3, 4), mstype.float32)
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    result = BatchMatMulNet(transpose_a=True)(x, y)
    expect = [[[[20, 23, 26, 29]],
               [[200, 212, 224, 236]],
               [[596, 617, 638, 659]],
               [[1208, 1238, 1268, 1298]]],
              [[[2036, 2075, 2114, 2153]],
               [[3080, 3128, 3176, 3224]],
               [[4340, 4397, 4454, 4511]],
               [[5816, 5882, 5948, 6014]]]]
    assert (result.asnumpy() == expect).all()
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_4d_transpose_b():
    """BatchMatMul with transpose_b=True reproduces the reference product."""
    x = Tensor(np.arange(2 * 4 * 1 * 3).reshape(2, 4, 1, 3), mstype.float32)
    y = Tensor(np.arange(2 * 4 * 4 * 3).reshape(2, 4, 4, 3), mstype.float32)
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    result = BatchMatMulNet(transpose_b=True)(x, y)
    expect = [[[[5, 14, 23, 32]],
               [[158, 194, 230, 266]],
               [[527, 590, 653, 716]],
               [[1112, 1202, 1292, 1382]]],
              [[[1913, 2030, 2147, 2264]],
               [[2930, 3074, 3218, 3362]],
               [[4163, 4334, 4505, 4676]],
               [[5612, 5810, 6008, 6206]]]]
    assert (result.asnumpy() == expect).all()
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_4d_transpose_ab():
    """BatchMatMul with both transposes reproduces the reference product."""
    x = Tensor(np.arange(2 * 4 * 3 * 1).reshape(2, 4, 3, 1), mstype.float32)
    y = Tensor(np.arange(2 * 4 * 4 * 3).reshape(2, 4, 4, 3), mstype.float32)
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    result = BatchMatMulNet(transpose_a=True, transpose_b=True)(x, y)
    expect = [[[[5, 14, 23, 32]],
               [[158, 194, 230, 266]],
               [[527, 590, 653, 716]],
               [[1112, 1202, 1292, 1382]]],
              [[[1913, 2030, 2147, 2264]],
               [[2930, 3074, 3218, 3362]],
               [[4163, 4334, 4505, 4676]],
               [[5612, 5810, 6008, 6206]]]]
    assert (result.asnumpy() == expect).all()
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_4D_fp16():
    """BatchMatMul on float16 inputs matches the fp16-rounded reference."""
    x = Tensor(np.arange(2 * 4 * 1 * 3).reshape(2, 4, 1, 3), mstype.float16)
    y = Tensor(np.arange(2 * 4 * 3 * 4).reshape(2, 4, 3, 4), mstype.float16)
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    result = BatchMatMulNet()(x, y)
    # Reference values already rounded to what float16 can represent.
    expect = np.array([[[[20, 23, 26, 29]],
                        [[200, 212, 224, 236]],
                        [[596, 617, 638, 659]],
                        [[1208, 1238, 1268, 1298]]],
                       [[[2036, 2076, 2114, 2152]],
                        [[3080, 3128, 3176, 3224]],
                        [[4340, 4396, 4456, 4510]],
                        [[5816, 5880, 5948, 6016]]]]).astype(np.float16)
    assert (result.asnumpy() == expect).all()
| 36.671329 | 82 | 0.581808 |
27b9165c3b68f9905b094806a82e0d24e4a5fa99 | 5,000 | py | Python | UniExplore/base/views/register.py | MichaelHills01/group-software-project-1 | 20847c062f7b4221bfb483eca28cb09a7a646a2b | [
"MIT"
] | 1 | 2022-03-09T17:16:00.000Z | 2022-03-09T17:16:00.000Z | UniExplore/base/views/register.py | MichaelHills01/group-software-project-1 | 20847c062f7b4221bfb483eca28cb09a7a646a2b | [
"MIT"
] | 60 | 2022-02-20T22:23:13.000Z | 2022-03-23T16:37:45.000Z | UniExplore/base/views/register.py | MichaelHills01/group-software-project-1 | 20847c062f7b4221bfb483eca28cb09a7a646a2b | [
"MIT"
] | 16 | 2022-02-19T13:10:04.000Z | 2022-03-23T12:21:21.000Z | from ..forms import UserRegisterForm
from ..models import Profile
from django.contrib import messages
from django.contrib.auth import login
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.auth.tokens import default_token_generator
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from django.utils.encoding import force_str
from django.conf import settings
"""
Authors: Conor Behard Roberts
Description: Checks to see if an email is valid given a valid suffix
"""
def is_valid_email(email, valid_suffix):
    """Return True if *email*'s domain equals *valid_suffix* (case-insensitive).

    The domain is taken after the *last* '@' (the original used
    ``split('@')[1]``, which picked the wrong segment for addresses
    containing more than one '@'). Addresses without any '@' are rejected
    instead of raising IndexError.
    """
    _, sep, domain = email.rpartition('@')
    if not sep:
        # No '@' at all: not a well-formed address, so it cannot match.
        return False
    return domain.lower() == valid_suffix.lower()
"""
Authors: Michael Hills, Conor Behard Roberts
Description: Function for user registration
"""
def registerPage(request):
    """Render the signup form and create a new, inactive user account.

    GET: render the registration form.
    POST: validate the form; on success create an inactive ``User`` plus its
    ``Profile``, add it to the ``user`` group and e-mail an activation link.
    Duplicate usernames/e-mails and non-university addresses redirect back
    to the register page with a warning.
    """
    # Empty form for the initial GET render.
    form = UserRegisterForm()
    if request.method == 'POST':
        form = UserRegisterForm(request.POST)
        if form.is_valid():
            email = form.cleaned_data.get('email')
            # Usernames are normalised to "Capitalized" form before lookup.
            username = form.cleaned_data.get('username').lower().capitalize()
            try:
                # Inverted existence check: if this lookup *succeeds* the
                # username is taken; the "available" path is the except branch.
                # NOTE(review): ``except BaseException`` swallows everything,
                # not just User.DoesNotExist -- confirm before narrowing.
                User.objects.get(username=username)
            except BaseException:
                if is_valid_email(email, settings.EMAIL_EXTENSION):
                    try:
                        # Same inverted pattern: success means this e-mail is
                        # already registered.
                        User.objects.get(email=email)
                    except BaseException:
                        # Create the account disabled until e-mail confirmation.
                        user = form.save(commit=False)
                        user.is_active = False
                        user.save()
                        user = form.save()
                        user.backend = 'django.contrib.auth.backends.ModelBackend'  # backend used by login()
                        Profile.objects.create(
                            user=user,
                            name=username,
                        )
                        # Grant default permissions via the "user" group.
                        group = Group.objects.get(name='user')
                        user.groups.add(group)
                        # Set to True to skip e-mail verification during development.
                        developer_mode = False
                        if developer_mode == False:
                            subject = 'Activate Your UniExplore Account'
                            message = render_to_string('email_verification/account_activation_email.html', {
                                'user': user,
                                'domain': 'uniexplore.co.uk',
                                'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                                'token': default_token_generator.make_token(user),
                            })
                            user.email_user(subject, message)
                            messages.success(request, ('Please Confirm your email to complete registration.'))
                        if developer_mode == True:
                            login(request, user)
                            messages.success(request, f'Account created for {username}!')
                            return redirect('home')
                        return redirect('login')
                    # Reached only when the e-mail lookup above succeeded.
                    messages.warning(request, "A User with this email already exists")
                    return redirect('register')
                else:
                    messages.warning(request, "Must sign up with an email ending in exeter.ac.uk")
                    return redirect('register')
            # Reached only when the username lookup above succeeded.
            messages.warning(request, "This username is taken")
            return redirect('register')
    context = {'form': form}
    return render(request, 'base/login_register.html', context)
def activate_account(request, uidb64, token):
    """Confirm a registration e-mail link and log the user in.

    Decodes the base64-encoded user id from the activation URL and checks
    the token. On success the account is activated, the profile's e-mail is
    marked confirmed, and the user is logged in. Invalid or reused links
    redirect home with a warning.

    Fix: removed three leftover debug ``print()`` calls that wrote the user,
    the raw token, and the token-check result to stdout.
    """
    try:
        uid = urlsafe_base64_decode(uidb64).decode()
        user = User.objects.get(pk=uid)
    except (TypeError, ValueError, OverflowError, User.DoesNotExist):
        # Malformed uid or unknown user: treated as an invalid link below.
        user = None
    if user is not None and default_token_generator.check_token(user, token):
        user.backend = 'django.contrib.auth.backends.ModelBackend'  # backend used by login()
        user.is_active = True
        user.profile.email_confirmed = True
        user.save()
        login(request, user)
        messages.success(request, ('Your account has been confirmed.'))
        return redirect('home')
    else:
        messages.warning(request, ('The confirmation link was invalid, possibly because it has already been used.'))
        return redirect('home')
905b24ebf5da133ee222eaf584eafef81c80320d | 1,467 | py | Python | tests/cp2/test_cp2_cincoffset.py | capt-hb/cheritest | d3b3637a81a0005ee7272eca0f33a9f9911fdb32 | [
"Apache-2.0"
] | null | null | null | tests/cp2/test_cp2_cincoffset.py | capt-hb/cheritest | d3b3637a81a0005ee7272eca0f33a9f9911fdb32 | [
"Apache-2.0"
] | 2 | 2020-06-02T13:44:55.000Z | 2020-06-02T14:06:29.000Z | tests/cp2/test_cp2_cincoffset.py | capt-hb/cheritest | d3b3637a81a0005ee7272eca0f33a9f9911fdb32 | [
"Apache-2.0"
] | null | null | null | #-
# Copyright (c) 2014 Michael Roe
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
from beritest_tools import attr
class test_cp2_cincoffset(BaseBERITestCase):
    """Checks the result of the CIncOffset test assembled alongside this file."""

    @attr('capabilities')
    def test_cp2_cincoffset_1(self):
        '''Test that CIncOffset increments the offset field'''
        # The assembly fixture is expected to leave the resulting offset (3)
        # in MIPS register a0; assertRegisterEqual compares against it.
        self.assertRegisterEqual(self.MIPS.a0, 3, "CIncOffset gave incorrect result")
| 39.648649 | 85 | 0.768234 |
b4d68d2c77ec995ceb10914a4452bad0c0884791 | 1,724 | py | Python | src/detect_face_haarcascade.py | pourabkarchaudhuri/face-detection | 84e41af458ed9c891d712c4a2294472003cedb4f | [
"MIT"
] | 1 | 2020-07-26T00:30:20.000Z | 2020-07-26T00:30:20.000Z | src/detect_face_haarcascade.py | pourabkarchaudhuri/face-detection | 84e41af458ed9c891d712c4a2294472003cedb4f | [
"MIT"
] | null | null | null | src/detect_face_haarcascade.py | pourabkarchaudhuri/face-detection | 84e41af458ed9c891d712c4a2294472003cedb4f | [
"MIT"
] | null | null | null | import cv2
import sys
import time
import os
# cascPath = sys.argv[1]
def main():
    """Run webcam face detection with a Haar cascade until 'q' is pressed.

    Fixes over the original:
    - checks the ``ret`` flag from ``VideoCapture.read`` so a failed grab
      ends the loop instead of crashing inside ``cvtColor`` on ``None``;
    - removed the dead ``start_time = end_time`` assignment (``start_time``
      is recomputed at the top of every iteration);
    - camera/window cleanup moved into ``finally`` so it also runs on error.
    """
    print('Loading HaarCascade Classifier:')
    classifier_path = os.path.join(os.getcwd(), '../haarcascade/haarcascade_frontalface_default.xml')
    faceCascade = cv2.CascadeClassifier(classifier_path)

    fps = 0  # exponentially smoothed frames-per-second estimate
    frame_num = 0

    video_capture = cv2.VideoCapture(0)
    video_capture.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
    video_capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)

    try:
        while True:
            start_time = time.time()
            ret, frame = video_capture.read()
            if not ret:
                # Camera disconnected or frame grab failed: stop cleanly.
                break
            frame_num = frame_num + 1

            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            faces = faceCascade.detectMultiScale(
                gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))

            # Draw a rectangle around each detected face.
            for (x, y, w, h) in faces:
                cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)

            # Exponential moving average keeps the FPS readout stable.
            end_time = time.time()
            fps = fps * 0.9 + 1 / (end_time - start_time) * 0.1

            frame_info = 'Frame: {0}, FPS: {1:.2f}'.format(frame_num, fps)
            cv2.putText(frame, frame_info, (10, frame.shape[0] - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)

            # Display the resulting frame.
            cv2.imshow('Video', frame)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    finally:
        # Release the camera and window resources even if an error occurred.
        video_capture.release()
        cv2.destroyAllWindows()
if __name__ == '__main__':
main() | 31.345455 | 147 | 0.638631 |
64d0ce939925e2322ef1db095ebaafb1a5c0c880 | 201 | py | Python | config.py | ilhamr0f11/Sub-Mux-IR-Bot | 6cba0e94d61f399fe5ac9e1ec1dcd72a33037785 | [
"MIT"
] | 1 | 2021-10-22T01:20:03.000Z | 2021-10-22T01:20:03.000Z | config.py | ilhamr0f11/Sub-Mux-IR-Bot | 6cba0e94d61f399fe5ac9e1ec1dcd72a33037785 | [
"MIT"
] | null | null | null | config.py | ilhamr0f11/Sub-Mux-IR-Bot | 6cba0e94d61f399fe5ac9e1ec1dcd72a33037785 | [
"MIT"
] | null | null | null |
import os
class Config:
    """Runtime configuration pulled from environment variables.

    Values are read once at import time; unset variables are ``None``
    (``os.environ.get`` already defaults to ``None``, so the explicit
    ``, None`` arguments were redundant and have been dropped).
    """

    # Telegram bot token from @BotFather (None if unset).
    BOT_TOKEN = os.environ.get('BOT_TOKEN')
    # Telegram API credentials from my.telegram.org (None if unset).
    APP_ID = os.environ.get('APP_ID')
    API_HASH = os.environ.get('API_HASH')
    # Local directory where incoming files are stored.
    DOWNLOAD_DIR = 'downloads'
277872faf1f9991e780c29b0f2cea181268b33a8 | 749 | py | Python | publisher.py | alivcor/presill | 43dd5d0d4d64050763e05a81c27debb0808daf6a | [
"Apache-2.0"
] | null | null | null | publisher.py | alivcor/presill | 43dd5d0d4d64050763e05a81c27debb0808daf6a | [
"Apache-2.0"
] | null | null | null | publisher.py | alivcor/presill | 43dd5d0d4d64050763e05a81c27debb0808daf6a | [
"Apache-2.0"
] | null | null | null | # Import package
import paho.mqtt.client as mqtt

# Broker connection settings.
MQTT_HOST = "iot.eclipse.org"
MQTT_PORT = 1883
MQTT_KEEPALIVE_INTERVAL = 5
MQTT_TOPIC = "SampleTopic"
MQTT_MSG = "Hello MQTT"


# Fix: the original used Python-2 `print "..."` statements, which are a
# SyntaxError on Python 3; converted to print() calls (same output text).
def on_connect(mosq, obj, rc):
    """Callback: log a successful connection to the broker."""
    print("Connected to MQTT Broker")


def on_publish(client, userdata, mid):
    """Callback: log that the message was handed off to the broker."""
    print("Message Published...")


# Initiate MQTT client and register the event handlers.
mqttc = mqtt.Client()
mqttc.on_publish = on_publish
mqttc.on_connect = on_connect

# Connect, publish one message, and disconnect.
mqttc.connect(MQTT_HOST, MQTT_PORT, MQTT_KEEPALIVE_INTERVAL)
mqttc.publish(MQTT_TOPIC, MQTT_MSG)
mqttc.disconnect()
| 20.243243 | 60 | 0.773031 |
3183972046e2cf18c05e9647f24ef8117c302410 | 32,990 | py | Python | venv/Lib/site-packages/skimage/draw/draw.py | amelliaaas/tugastkc4 | f442382c72379e911f3780543b95345a3b1c9407 | [
"Apache-2.0"
] | 4 | 2021-10-20T12:39:09.000Z | 2022-02-26T15:02:08.000Z | venv/Lib/site-packages/skimage/draw/draw.py | amelliaaas/tugastkc4 | f442382c72379e911f3780543b95345a3b1c9407 | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/skimage/draw/draw.py | amelliaaas/tugastkc4 | f442382c72379e911f3780543b95345a3b1c9407 | [
"Apache-2.0"
] | 20 | 2021-11-07T13:55:56.000Z | 2021-12-02T10:54:01.000Z | import warnings
import numpy as np
from .._shared._geometry import polygon_clip
from ._draw import (_coords_inside_image, _line, _line_aa,
_polygon, _ellipse_perimeter,
_circle_perimeter, _circle_perimeter_aa,
_bezier_curve)
def _ellipse_in_shape(shape, center, radii, rotation=0.):
"""Generate coordinates of points within ellipse bounded by shape.
Parameters
----------
shape : iterable of ints
Shape of the input image. Must be at least length 2. Only the first
two values are used to determine the extent of the input image.
center : iterable of floats
(row, column) position of center inside the given shape.
radii : iterable of floats
Size of two half axes (for row and column)
rotation : float, optional
Rotation of the ellipse defined by the above, in radians
in range (-PI, PI), in contra clockwise direction,
with respect to the column-axis.
Returns
-------
rows : iterable of ints
Row coordinates representing values within the ellipse.
cols : iterable of ints
Corresponding column coordinates representing values within the ellipse.
"""
r_lim, c_lim = np.ogrid[0:float(shape[0]), 0:float(shape[1])]
r_org, c_org = center
r_rad, c_rad = radii
rotation %= np.pi
sin_alpha, cos_alpha = np.sin(rotation), np.cos(rotation)
r, c = (r_lim - r_org), (c_lim - c_org)
distances = ((r * cos_alpha + c * sin_alpha) / r_rad) ** 2 \
+ ((r * sin_alpha - c * cos_alpha) / c_rad) ** 2
return np.nonzero(distances < 1)
def ellipse(r, c, r_radius, c_radius, shape=None, rotation=0.):
    """Generate the row/column coordinates of all pixels inside an ellipse.

    Parameters
    ----------
    r, c : double
        Centre coordinate of the ellipse.
    r_radius, c_radius : double
        Semi-axes along rows and columns:
        ``(r/r_radius)**2 + (c/c_radius)**2 = 1``.
    shape : tuple, optional
        If given, output coordinates are clipped to ``shape[:2]`` so that
        ellipses larger than the image stay in bounds. By default the full
        extent of the ellipse is used.
    rotation : float, optional
        Ellipse rotation in radians, counter-clockwise, in range (-PI, PI).

    Returns
    -------
    rr, cc : ndarray of int
        Pixel coordinates of the ellipse interior; usable directly as
        ``img[rr, cc] = 1``.

    Notes
    -----
    Without ``shape`` the returned coordinates may be negative; indexing an
    array with them then wraps around, since ``image[-1, -1]`` addresses the
    last pixel.
    """
    rotation %= np.pi
    centre = np.array([r, c])
    half_axes = np.array([r_radius, c_radius])

    # Half-extents of the axis-aligned bounding box of the rotated ellipse.
    bbox_r = abs(r_radius * np.cos(rotation)) \
        + c_radius * np.sin(rotation)
    bbox_c = r_radius * np.sin(rotation) \
        + abs(c_radius * np.cos(rotation))
    half_extents = np.array([bbox_r, bbox_c])

    # Integer corners of the smallest rectangle containing the ellipse.
    top_left = np.ceil(centre - half_extents).astype(int)
    bottom_right = np.floor(centre + half_extents).astype(int)

    if shape is not None:
        # Constrain the bounding rectangle by the image boundary.
        top_left = np.maximum(top_left, np.array([0, 0]))
        bottom_right = np.minimum(bottom_right, np.array(shape[:2]) - 1)

    # Solve in the local frame of the bounding rectangle, then shift back.
    local_centre = centre - top_left
    local_shape = bottom_right - top_left + 1
    rr, cc = _ellipse_in_shape(local_shape, local_centre, half_axes, rotation)
    # Ensure the index arrays are writable before shifting them in place.
    rr.flags.writeable = True
    cc.flags.writeable = True
    rr += top_left[0]
    cc += top_left[1]
    return rr, cc
def circle(r, c, radius, shape=None):
    """Generate coordinates of pixels within a circle (deprecated).

    Deprecated alias kept for backward compatibility; emits a
    ``FutureWarning`` and forwards to :func:`disk`.

    Parameters
    ----------
    r, c : double
        Centre coordinate of the disk.
    radius : double
        Radius of the disk.
    shape : tuple, optional
        Image shape used to clip the output coordinates; see :func:`disk`.

    Returns
    -------
    rr, cc : ndarray of int
        Pixel coordinates of the disk, usable as ``img[rr, cc] = 1``.
    """
    deprecation_msg = ("`draw.circle` is deprecated in favor of `draw.disk`."
                       "`draw.circle` will be removed in version 0.19")
    warnings.warn(deprecation_msg, FutureWarning, stacklevel=2)
    center = (r, c)
    return disk(center, radius, shape=shape)
def disk(center, radius, *, shape=None):
    """Generate coordinates of pixels within a filled circle.

    Parameters
    ----------
    center : tuple
        (row, column) centre coordinate of the disk.
    radius : double
        Radius of the disk.
    shape : tuple, optional
        Image shape used to clip the output coordinates; useful for disks
        that exceed the image size. If None, the full extent of the disk
        is returned. Only the first two values are used.

    Returns
    -------
    rr, cc : ndarray of int
        Pixel coordinates of the disk interior, usable directly as
        ``img[rr, cc] = 1``.
    """
    # A disk is just an ellipse with equal half-axes.
    row, col = center
    return ellipse(row, col, radius, radius, shape)
def polygon_perimeter(r, c, shape=None, clip=False):
    """Generate the pixel coordinates of a polygon's perimeter.

    Parameters
    ----------
    r : (N,) ndarray
        Row coordinates of the polygon vertices.
    c : (N,) ndarray
        Column coordinates of the polygon vertices.
    shape : tuple, optional
        Image shape used to restrict the output coordinates; useful for
        polygons that exceed the image size. Only the first two values
        are used.
    clip : bool, optional
        If True, clip the polygon to ``shape`` so the drawn figure is
        always a closed polygon with all edges visible. Requires ``shape``.

    Returns
    -------
    rr, cc : ndarray of int
        Pixel coordinates of the perimeter, usable as ``img[rr, cc] = 1``.
    """
    if clip:
        if shape is None:
            raise ValueError("Must specify clipping shape")
        bounds = np.array([0, 0, shape[0] - 1, shape[1] - 1])
    else:
        bounds = np.array([np.min(r), np.min(c), np.max(r), np.max(c)])

    # Clipping is always applied (against the polygon's own bounding box
    # when clip is False) so the returned vertex lists are closed arrays.
    r, c = polygon_clip(r, c, *bounds)

    r = np.round(r).astype(int)
    c = np.round(c).astype(int)

    # Rasterize each edge between consecutive vertices.
    rows, cols = [], []
    for start, stop in zip(zip(r[:-1], c[:-1]), zip(r[1:], c[1:])):
        edge_r, edge_c = line(start[0], start[1], stop[0], stop[1])
        rows.extend(edge_r)
        cols.extend(edge_c)

    rows = np.asarray(rows)
    cols = np.asarray(cols)
    if shape is None:
        return rows, cols
    return _coords_inside_image(rows, cols, shape)
def set_color(image, coords, color, alpha=1):
    """Alpha-blend *color* into *image* at the given coordinates, in place.

    Coordinates outside the image are silently ignored.

    Parameters
    ----------
    image : (M, N, D) ndarray
        Image to modify; a 2-D image is treated as single-channel.
    coords : tuple of ((P,) ndarray, (P,) ndarray)
        Row and column coordinates of the pixels to colour.
    color : (D,) ndarray
        Colour assigned to the coordinates.
    alpha : scalar or (N,) ndarray
        Blending weight(s); 0 is transparent, 1 is opaque.
    """
    rr, cc = coords

    if image.ndim == 2:
        # Promote single-channel images to a trailing channel axis.
        image = image[..., np.newaxis]

    color = np.array(color, ndmin=1, copy=False)

    if image.shape[-1] != color.shape[-1]:
        raise ValueError('Color shape ({}) must match last '
                         'image dimension ({}).'.format(color.shape[0],
                                                        image.shape[-1]))

    if np.isscalar(alpha):
        # Broadcast a scalar alpha to one value per coordinate.
        alpha = np.ones_like(rr) * alpha

    # Drop coordinates (and their alphas) that fall outside the image.
    rr, cc, alpha = _coords_inside_image(rr, cc, image.shape, val=alpha)

    alpha = alpha[..., np.newaxis]
    blended = image[rr, cc] * (1 - alpha) + color * alpha
    image[rr, cc] = blended
def line(r0, c0, r1, c1):
    """Generate the pixel coordinates of a straight line segment.

    Parameters
    ----------
    r0, c0 : int
        Row and column of the starting point.
    r1, c1 : int
        Row and column of the end point.

    Returns
    -------
    rr, cc : (N,) ndarray of int
        Indices of the pixels on the line, usable directly as
        ``img[rr, cc] = 1``.

    Notes
    -----
    See `line_aa` for an anti-aliased variant.
    """
    # All rasterization work happens in the compiled helper.
    return _line(r0, c0, r1, c1)
def line_aa(r0, c0, r1, c1):
    """Generate anti-aliased pixel coordinates for a line segment.

    Parameters
    ----------
    r0, c0 : int
        Row and column of the starting point.
    r1, c1 : int
        Row and column of the end point.

    Returns
    -------
    rr, cc, val : (N,) ndarray (int, int, float)
        Pixel indices (`rr`, `cc`) and blending intensities (`val`);
        apply as ``img[rr, cc] = val``.

    References
    ----------
    .. [1] A Rasterizing Algorithm for Drawing Curves, A. Zingl, 2012
           http://members.chello.at/easyfilter/Bresenham.pdf
    """
    # All rasterization work happens in the compiled helper.
    return _line_aa(r0, c0, r1, c1)
def polygon(r, c, shape=None):
    """Generate the coordinates of all pixels inside a polygon.

    Parameters
    ----------
    r : (N,) ndarray
        Row coordinates of the polygon vertices.
    c : (N,) ndarray
        Column coordinates of the polygon vertices.
    shape : tuple, optional
        Image shape used to clip the output coordinates; useful for
        polygons that exceed the image size. If None, the full extent of
        the polygon is used. Only the first two values are used.

    Returns
    -------
    rr, cc : ndarray of int
        Pixel coordinates of the polygon interior, usable directly as
        ``img[rr, cc] = 1``.
    """
    # Scan-fill is implemented in the compiled helper.
    return _polygon(r, c, shape)
def circle_perimeter(r, c, radius, method='bresenham', shape=None):
    """Generate the pixel coordinates of a circle's perimeter.

    Parameters
    ----------
    r, c : int
        Centre coordinate of the circle.
    radius : int
        Radius of the circle.
    method : {'bresenham', 'andres'}, optional
        Rasterization method; Bresenham (midpoint circle, the default) or
        Andres. Andres circles have less rotational distortion and
        concentric Andres circles tile into a solid disc, whereas
        Bresenham circles may leave holes.
    shape : tuple, optional
        Image shape used to clip the output coordinates; useful for
        circles that exceed the image size. Only the first two values
        are used.

    Returns
    -------
    rr, cc : (N,) ndarray of int
        Indices of the perimeter pixels, usable as ``img[rr, cc] = 1``.

    References
    ----------
    .. [1] J.E. Bresenham, "Algorithm for computer control of a digital
           plotter", IBM Systems journal, 4 (1965) 25-30.
    .. [2] E. Andres, "Discrete circles, rings and spheres", Computers &
           Graphics, 18 (1994) 695-706.
    """
    # See `circle_perimeter_aa` for an anti-aliased variant; the
    # rasterization itself lives in the compiled helper.
    return _circle_perimeter(r, c, radius, method, shape)
def circle_perimeter_aa(r, c, radius, shape=None):
    """Generate anti-aliased pixel coordinates for a circle's perimeter.

    Parameters
    ----------
    r, c : int
        Centre coordinate of the circle.
    radius : int
        Radius of the circle.
    shape : tuple, optional
        Image shape used to clip the output coordinates; useful for
        circles that exceed the image size. Only the first two values
        are used.

    Returns
    -------
    rr, cc, val : (N,) ndarray (int, int, float)
        Pixel indices (`rr`, `cc`) and blending intensities (`val`);
        apply as ``img[rr, cc] = val``, or via ``draw.set_color`` for
        colour images.

    Notes
    -----
    Implements Wu's anti-aliased circle drawing (without the lookup-table
    optimization).

    References
    ----------
    .. [1] X. Wu, "An efficient antialiasing technique", In ACM SIGGRAPH
           Computer Graphics, 25 (1991) 143-152.
    """
    # The rasterization itself lives in the compiled helper.
    return _circle_perimeter_aa(r, c, radius, shape)
def ellipse_perimeter(r, c, r_radius, c_radius, orientation=0, shape=None):
    """Generate ellipse perimeter coordinates.

    Parameters
    ----------
    r, c : int
        Centre coordinate of ellipse.
    r_radius, c_radius : int
        Minor and major semi-axes. ``(r/r_radius)**2 + (c/c_radius)**2 = 1``.
    orientation : double, optional
        Major axis orientation in clockwise direction as radians.
    shape : tuple, optional
        Image shape which is used to determine the maximum extent of output
        pixel coordinates. This is useful for ellipses that exceed the image
        size. If None, the full extent of the ellipse is used. Must be at
        least length 2. Only the first two values are used to determine the
        extent of the input image.

    Returns
    -------
    rr, cc : (N,) ndarray of int
        Indices of pixels that belong to the ellipse perimeter.
        May be used to directly index into an array, e.g.
        ``img[rr, cc] = 1``.

    References
    ----------
    .. [1] A Rasterizing Algorithm for Drawing Curves, A. Zingl, 2012
           http://members.chello.at/easyfilter/Bresenham.pdf

    Examples
    --------
    >>> from skimage.draw import ellipse_perimeter
    >>> img = np.zeros((10, 10), dtype=np.uint8)
    >>> rr, cc = ellipse_perimeter(5, 5, 3, 4)
    >>> img[rr, cc] = 1
    >>> img
    array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
           [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
           [0, 0, 0, 1, 1, 1, 1, 1, 0, 0],
           [0, 0, 1, 0, 0, 0, 0, 0, 1, 0],
           [0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
           [0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
           [0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
           [0, 0, 1, 0, 0, 0, 0, 0, 1, 0],
           [0, 0, 0, 1, 1, 1, 1, 1, 0, 0],
           [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=uint8)

    Note that the positions of `ellipse` without specified `shape` can have
    also, negative values, as this is correct on the plane. On the other hand
    using these ellipse positions for an image afterwards may lead to appearing
    on the other side of image, because ``image[-1, -1] = image[end-1, end-1]``

    >>> rr, cc = ellipse_perimeter(2, 3, 4, 5)
    >>> img = np.zeros((9, 12), dtype=np.uint8)
    >>> img[rr, cc] = 1
    >>> img
    array([[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1],
           [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0],
           [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0],
           [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0],
           [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1],
           [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
           [0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
           [0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0],
           [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]], dtype=uint8)
    """
    # Thin public wrapper: the compiled implementation in the private
    # ``_draw`` module does all of the work.
    return _ellipse_perimeter(r, c, r_radius, c_radius, orientation, shape)
def bezier_curve(r0, c0, r1, c1, r2, c2, weight, shape=None):
    """Generate Bezier curve coordinates.

    Parameters
    ----------
    r0, c0 : int
        Coordinates of the first control point.
    r1, c1 : int
        Coordinates of the middle control point.
    r2, c2 : int
        Coordinates of the last control point.
    weight : double
        Middle control point weight, it describes the line tension.
    shape : tuple, optional
        Image shape which is used to determine the maximum extent of output
        pixel coordinates. This is useful for curves that exceed the image
        size. If None, the full extent of the curve is used.

    Returns
    -------
    rr, cc : (N,) ndarray of int
        Indices of pixels that belong to the Bezier curve.
        May be used to directly index into an array, e.g.
        ``img[rr, cc] = 1``.

    Notes
    -----
    The algorithm is the rational quadratic algorithm presented in
    reference [1]_.

    References
    ----------
    .. [1] A Rasterizing Algorithm for Drawing Curves, A. Zingl, 2012
           http://members.chello.at/easyfilter/Bresenham.pdf

    Examples
    --------
    >>> import numpy as np
    >>> from skimage.draw import bezier_curve
    >>> img = np.zeros((10, 10), dtype=np.uint8)
    >>> rr, cc = bezier_curve(1, 5, 5, -2, 8, 8, 2)
    >>> img[rr, cc] = 1
    >>> img
    array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
           [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
           [0, 0, 0, 1, 1, 0, 0, 0, 0, 0],
           [0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
           [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
           [0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
           [0, 0, 1, 1, 0, 0, 0, 0, 0, 0],
           [0, 0, 0, 0, 1, 1, 1, 0, 0, 0],
           [0, 0, 0, 0, 0, 0, 0, 1, 1, 0],
           [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=uint8)
    """
    # Thin public wrapper: the compiled implementation in the private
    # ``_draw`` module does all of the work.
    return _bezier_curve(r0, c0, r1, c1, r2, c2, weight, shape)
def rectangle(start, end=None, extent=None, shape=None):
    """Generate coordinates of pixels within a rectangle.

    Parameters
    ----------
    start : tuple
        Origin point of the rectangle, e.g., ``([plane,] row, column)``.
    end : tuple
        End point of the rectangle ``([plane,] row, column)``.
        For a 2D matrix, the slice defined by the rectangle is
        ``[start:(end+1)]``.
        Either `end` or `extent` must be specified.
    extent : tuple
        The extent (size) of the drawn rectangle. E.g.,
        ``([num_planes,] num_rows, num_cols)``.
        Either `end` or `extent` must be specified.
        A negative extent is valid, and will result in a rectangle
        going along the opposite direction. If extent is negative, the
        `start` point is not included.
    shape : tuple, optional
        Image shape used to determine the maximum bounds of the output
        coordinates. This is useful for clipping rectangles that exceed
        the image size. By default, no clipping is done.

    Returns
    -------
    coords : array of int, shape (Ndim, Npoints)
        The coordinates of all pixels in the rectangle.

    Notes
    -----
    This function can be applied to N-dimensional images, by passing `start` and
    `end` or `extent` as tuples of length N.

    Examples
    --------
    >>> import numpy as np
    >>> from skimage.draw import rectangle
    >>> img = np.zeros((5, 5), dtype=np.uint8)
    >>> start = (1, 1)
    >>> extent = (3, 3)
    >>> rr, cc = rectangle(start, extent=extent, shape=img.shape)
    >>> img[rr, cc] = 1
    >>> img
    array([[0, 0, 0, 0, 0],
           [0, 1, 1, 1, 0],
           [0, 1, 1, 1, 0],
           [0, 1, 1, 1, 0],
           [0, 0, 0, 0, 0]], dtype=uint8)

    >>> img = np.zeros((5, 5), dtype=np.uint8)
    >>> start = (0, 1)
    >>> end = (3, 3)
    >>> rr, cc = rectangle(start, end=end, shape=img.shape)
    >>> img[rr, cc] = 1
    >>> img
    array([[0, 1, 1, 1, 0],
           [0, 1, 1, 1, 0],
           [0, 1, 1, 1, 0],
           [0, 1, 1, 1, 0],
           [0, 0, 0, 0, 0]], dtype=uint8)

    >>> import numpy as np
    >>> from skimage.draw import rectangle
    >>> img = np.zeros((6, 6), dtype=np.uint8)
    >>> start = (3, 3)
    >>>
    >>> rr, cc = rectangle(start, extent=(2, 2))
    >>> img[rr, cc] = 1
    >>> rr, cc = rectangle(start, extent=(-2, 2))
    >>> img[rr, cc] = 2
    >>> rr, cc = rectangle(start, extent=(-2, -2))
    >>> img[rr, cc] = 3
    >>> rr, cc = rectangle(start, extent=(2, -2))
    >>> img[rr, cc] = 4
    >>> print(img)
    [[0 0 0 0 0 0]
     [0 3 3 2 2 0]
     [0 3 3 2 2 0]
     [0 4 4 1 1 0]
     [0 4 4 1 1 0]
     [0 0 0 0 0 0]]
    """
    # Half-open bounds of the rectangle: top_left inclusive, bottom_right
    # exclusive, one array entry per dimension.
    tl, br = _rectangle_slice(start=start, end=end, extent=extent)
    if shape is not None:
        # Clip to the image: bottom-right against the image shape,
        # top-left against the origin.
        n_dim = len(start)
        br = np.minimum(shape[0:n_dim], br)
        tl = np.maximum(np.zeros_like(shape[0:n_dim]), tl)
    # Use matrix ('ij') indexing so that ``coords[i]`` varies along axis ``i``.
    # NumPy's default 'xy' indexing transposes the first two coordinate
    # arrays, which yields inconsistently-shaped output for n-dimensional
    # input; the *set* of generated pixel coordinates (and therefore
    # ``img[rr, cc] = val`` usage) is unaffected by this fix.
    coords = np.meshgrid(*[np.arange(st, en) for st, en in zip(tuple(tl),
                                                               tuple(br))],
                         indexing='ij')
    return coords
def rectangle_perimeter(start, end=None, extent=None, shape=None, clip=False):
    """Generate coordinates of pixels that are exactly around a rectangle.

    Parameters
    ----------
    start : tuple
        Origin point of the inner rectangle, e.g., ``(row, column)``.
    end : tuple
        End point of the inner rectangle ``(row, column)``.
        For a 2D matrix, the slice defined by inner the rectangle is
        ``[start:(end+1)]``.
        Either `end` or `extent` must be specified.
    extent : tuple
        The extent (size) of the inner rectangle. E.g.,
        ``(num_rows, num_cols)``.
        Either `end` or `extent` must be specified.
        Negative extents are permitted. See `rectangle` to better
        understand how they behave.
    shape : tuple, optional
        Image shape used to determine the maximum bounds of the output
        coordinates. This is useful for clipping perimeters that exceed
        the image size. By default, no clipping is done. Must be at least
        length 2. Only the first two values are used to determine the extent of
        the input image.
    clip : bool, optional
        Whether to clip the perimeter to the provided shape. If this is set
        to True, the drawn figure will always be a closed polygon with all
        edges visible.

    Returns
    -------
    coords : array of int, shape (2, Npoints)
        The coordinates of all pixels in the rectangle.

    Examples
    --------
    >>> import numpy as np
    >>> from skimage.draw import rectangle_perimeter
    >>> img = np.zeros((5, 6), dtype=np.uint8)
    >>> start = (2, 3)
    >>> end = (3, 4)
    >>> rr, cc = rectangle_perimeter(start, end=end, shape=img.shape)
    >>> img[rr, cc] = 1
    >>> img
    array([[0, 0, 0, 0, 0, 0],
           [0, 0, 1, 1, 1, 1],
           [0, 0, 1, 0, 0, 1],
           [0, 0, 1, 0, 0, 1],
           [0, 0, 1, 1, 1, 1]], dtype=uint8)

    >>> img = np.zeros((5, 5), dtype=np.uint8)
    >>> r, c = rectangle_perimeter(start, (10, 10), shape=img.shape, clip=True)
    >>> img[r, c] = 1
    >>> img
    array([[0, 0, 0, 0, 0],
           [0, 0, 1, 1, 1],
           [0, 0, 1, 0, 1],
           [0, 0, 1, 0, 1],
           [0, 0, 1, 1, 1]], dtype=uint8)
    """
    top_left, bottom_right = _rectangle_slice(start=start,
                                              end=end,
                                              extent=extent)
    # Shift the top-left corner one pixel outward so the perimeter surrounds
    # the inner rectangle; ``bottom_right`` is already exclusive (end + 1),
    # i.e. one past the inner rectangle, so it needs no adjustment.
    top_left -= 1
    # Closed clockwise corner sequence (the first corner is repeated at the
    # end) handed to ``polygon_perimeter`` to rasterize the four edges.
    r = [top_left[0], top_left[0], bottom_right[0], bottom_right[0],
         top_left[0]]
    c = [top_left[1], bottom_right[1], bottom_right[1], top_left[1],
         top_left[1]]
    return polygon_perimeter(r, c, shape=shape, clip=clip)
def _rectangle_slice(start, end=None, extent=None):
"""Return the slice ``(top_left, bottom_right)`` of the rectangle.
Returns
=======
(top_left, bottomm_right)
The slice you would need to select the region in the rectangle defined
by the parameters.
Select it like:
``rect[top_left[0]:bottom_right[0], top_left[1]:bottom_right[1]]``
"""
if end is None and extent is None:
raise ValueError("Either `end` or `extent` must be given.")
if end is not None and extent is not None:
raise ValueError("Cannot provide both `end` and `extent`.")
if extent is not None:
end = np.asarray(start) + np.asarray(extent)
top_left = np.minimum(start, end)
bottom_right = np.maximum(start, end)
if extent is None:
bottom_right += 1
return (top_left, bottom_right)
| 34.762908 | 81 | 0.525856 |
b69e397a87a2e88bb5c0082f549c6356fc4e27bc | 22,739 | py | Python | House Rocket Company/House_Rocket.py | IgorQueiroz32/curso_meigaron_pyhton_ao_ds | 91e7b8336065dc841f620847997156bad6fed35e | [
"MIT"
] | null | null | null | House Rocket Company/House_Rocket.py | IgorQueiroz32/curso_meigaron_pyhton_ao_ds | 91e7b8336065dc841f620847997156bad6fed35e | [
"MIT"
] | null | null | null | House Rocket Company/House_Rocket.py | IgorQueiroz32/curso_meigaron_pyhton_ao_ds | 91e7b8336065dc841f620847997156bad6fed35e | [
"MIT"
] | null | null | null | import pandas as pd
import streamlit as st
import plotly.express as px
st.set_page_config(layout='wide')  # use the full browser width so tables and charts render larger
# read data
@st.cache(allow_output_mutation=True)  # cache the loaded frame in memory across Streamlit reruns
def get_data(path):
    """Read the house-sales CSV at ``path`` into a pandas DataFrame."""
    data = pd.read_csv(path)
    return data
# transformation
# excluding outliers
def data_excluding(data):
    """Drop outlier rows from the raw dataset, in place.

    Rows removed: more than 11 bedrooms, zero bedrooms or zero bathrooms,
    and the two records with ids 125059179 / 125059178.  The frame is
    mutated in place and returned for chaining.
    """
    outliers = (
        (data['bedrooms'] > 11)
        | (data['bedrooms'] == 0)
        | (data['bathrooms'] == 0)
        | (data['id'] == 125059179)
        | (data['id'] == 125059178)
    )
    data.drop(data[outliers].index, inplace=True)
    return data
# data transformation
def set_feature(data):
    """Normalize the sale date and derive a month-day key, in place.

    ``date`` is rewritten as an ISO ``YYYY-MM-DD`` string and a ``month_day``
    column (``MM-DD``) is added so sales can later be bucketed into seasons.
    """
    parsed_dates = pd.to_datetime(data['date'])
    data['date'] = parsed_dates.dt.strftime('%Y-%m-%d')
    data['month_day'] = parsed_dates.dt.strftime('%m-%d')
    return data
def houses_buy(data):
    """Flag which houses should be bought and enrich the data for later steps.

    A house is flagged ``'buy'`` when its price is below the median price of
    its zipcode and its condition is at least 3.  Two helper columns are also
    added: ``recommendation_to_buy`` (regular/high/very_high, from room
    counts and lot size) and ``season`` (from the ``month_day`` string).

    The original per-row ``for i in range(len(...))`` / ``.loc[i, ...]``
    loops were replaced with vectorized boolean-mask assignments that
    produce the same columns far faster on the ~21k-row dataset.

    :param data: cleaned dataset (needs price, zipcode, condition, bedrooms,
        bathrooms, floors, sqft_lot and month_day columns).
    :return: merged DataFrame with status/recommendation/season columns.
    """
    st.title('House Rocket Company')
    st.markdown('Welcome to House Rocket Data Analysis')
    # solving first question (which houses should be bought)
    st.header('Houses to Buy')
    price_median_buy = data[['price', 'zipcode']].groupby('zipcode').median().reset_index()
    price_median_buy.columns = ['zipcode', 'price_median_buy']
    houses_to_buy = pd.merge(data, price_median_buy, on='zipcode', how='inner')
    # Vectorized 'buy' rule: cheaper than the zipcode median and well kept.
    houses_to_buy['status'] = 'do not buy'
    buy_mask = ((houses_to_buy['price'] < houses_to_buy['price_median_buy'])
                & (houses_to_buy['condition'] >= 3))
    houses_to_buy.loc[buy_mask, 'status'] = 'buy'
    first_column = houses_to_buy.pop('status')
    houses_to_buy.insert(0, 'status', first_column)
    st.header(houses_to_buy.shape)
    st.write('Num of Houses, Num of Attributes')
    st.dataframe(houses_to_buy)
    st.write("This table informs which house is indicated to buy, among 21594 houses available. Also it presents all houses characteristics.")
    # solving first question second part
    st.header('Houses Recommendation to Buy')
    # Assignment order mirrors the original if/elif priority: 'high' first,
    # then 'very_high' overrides it where both masks hold.
    houses_to_buy['recommendation_to_buy'] = 'regular'
    high_mask = ((houses_to_buy['floors'] >= 2)
                 & houses_to_buy['bedrooms'].between(4, 7)
                 & houses_to_buy['bathrooms'].between(2, 4))
    houses_to_buy.loc[high_mask, 'recommendation_to_buy'] = 'high'
    very_high_mask = ((houses_to_buy['bedrooms'] >= 8)
                      | (houses_to_buy['sqft_lot'] >= 1074218)
                      | (houses_to_buy['bathrooms'] >= 4.25))
    houses_to_buy.loc[very_high_mask, 'recommendation_to_buy'] = 'very_high'
    # Season from the 'MM-DD' string; everything outside the three ranges
    # (i.e. December-February) stays 'winter', as in the original else branch.
    month_day = houses_to_buy['month_day']
    houses_to_buy['season'] = 'winter'
    houses_to_buy.loc[(month_day >= '03-01') & (month_day <= '05-31'), 'season'] = 'spring'
    houses_to_buy.loc[(month_day >= '06-01') & (month_day <= '08-31'), 'season'] = 'summer'
    houses_to_buy.loc[(month_day >= '09-01') & (month_day <= '11-30'), 'season'] = 'fall'
    return houses_to_buy
def houses_sell(houses_to_buy):
    """Price the recommended houses for resale.

    Keeps only rows flagged ``'buy'`` and derives ``price_to_sell`` by
    applying a markup on the purchase price.  The markup depends on the
    price relative to the (zipcode, season) median and on the buy
    recommendation: below the median it is 30% / 37.5% / 45% for
    regular / high / very_high; above the median it is 10% / 12.5%; every
    remaining case (price at the median, or above-median very_high) gets 15%.

    The per-row ``for``/``elif`` chain was replaced by boolean-mask
    assignments over a markup Series; the arithmetic is kept as
    ``price + price * factor`` so results match the original exactly.

    :param houses_to_buy: output of :func:`houses_buy` (needs status,
        recommendation_to_buy, price, zipcode and season columns).
    :return: DataFrame of houses to buy with the added ``price_to_sell``.
    """
    # solving second question first part (for how much the houses should be sold)
    houses_buy_sell = houses_to_buy
    houses_buy_sell = houses_buy_sell[houses_buy_sell.status == 'buy']
    houses_buy_sell = houses_buy_sell.drop('status', axis=1)
    first_column1 = houses_buy_sell.pop('recommendation_to_buy')
    houses_buy_sell.insert(0, 'recommendation_to_buy', first_column1)
    price_median_sell = houses_buy_sell[['price', 'zipcode', 'season']].groupby(
        ['zipcode', 'season']).median().reset_index()
    price_median_sell.columns = ['zipcode', 'season', 'price_median_sell']
    # Merge on the shared columns (zipcode, season); this also resets the
    # index to a plain RangeIndex.
    houses_buy_sell = pd.merge(houses_buy_sell, price_median_sell, how='inner')
    below = houses_buy_sell['price'] < houses_buy_sell['price_median_sell']
    above = houses_buy_sell['price'] > houses_buy_sell['price_median_sell']
    rec = houses_buy_sell['recommendation_to_buy']
    # The 0.15 default covers the original final else: price equal to the
    # median (any recommendation) or above the median with very_high.
    markup = pd.Series(0.15, index=houses_buy_sell.index)
    markup[below & (rec == 'regular')] = 0.3
    markup[below & (rec == 'high')] = 0.375
    markup[below & (rec == 'very_high')] = 0.45
    markup[above & (rec == 'regular')] = 0.10
    markup[above & (rec == 'high')] = 0.125
    houses_buy_sell['price_to_sell'] = houses_buy_sell['price'] + houses_buy_sell['price'] * markup
    first_column2 = houses_buy_sell.pop('price_to_sell')
    houses_buy_sell.insert(4, 'price_to_sell', first_column2)
    return houses_buy_sell
def profits(houses_buy_sell):
    """Add profit columns to the buy/sell table, in place.

    Adds three columns (each moved into a fixed position so the dashboard
    shows them next to the prices):

    * ``profit`` — absolute gain, ``price_to_sell - price`` (column 5);
    * ``profit_percentage_per_house`` — gain relative to the purchase
      price, in percent (column 6);
    * ``profit_percentage_total`` — this house's share of the total
      profit, in percent (column 7).

    The original three per-row loops were replaced with single vectorized
    column expressions that compute the same values.

    :param houses_buy_sell: output of :func:`houses_sell`.
    :return: the same DataFrame, with the three profit columns added.
    """
    # solving second question second part (when sell the houses)
    houses_buy_sell['profit'] = houses_buy_sell['price_to_sell'] - houses_buy_sell['price']
    first_column3 = houses_buy_sell.pop('profit')
    houses_buy_sell.insert(5, 'profit', first_column3)
    houses_buy_sell['profit_percentage_per_house'] = (
        (houses_buy_sell['price_to_sell'] - houses_buy_sell['price'])
        / houses_buy_sell['price']) * 100
    first_column4 = houses_buy_sell.pop('profit_percentage_per_house')
    houses_buy_sell.insert(6, 'profit_percentage_per_house', first_column4)
    houses_buy_sell['profit_percentage_total'] = (
        houses_buy_sell['profit'] / houses_buy_sell['profit'].sum()) * 100
    first_column5 = houses_buy_sell.pop('profit_percentage_total')
    houses_buy_sell.insert(7, 'profit_percentage_total', first_column5)
    return houses_buy_sell
def map(houses_buy_sell):
    """Render the filterable portfolio table and the scatter map.

    NOTE(review): this function shadows the builtin ``map``; it is kept
    with this name because ``__main__`` calls it as ``map(z)``.

    Fix: ``DataFrame.append`` (deprecated since pandas 1.4 and removed in
    pandas 2.0) was replaced with the equivalent ``pd.concat`` calls.

    :param houses_buy_sell: output of :func:`profits`.
    :return: None (all output goes to the Streamlit page).
    """
    # entire dataframe of houses to buy
    f_recommendation_to_buy = st.sidebar.multiselect('Enter Houses Recommendation to Buy',
                                                     houses_buy_sell['recommendation_to_buy'].sort_values().unique())
    if f_recommendation_to_buy != []:
        houses_buy_sell_df = houses_buy_sell.loc[houses_buy_sell['recommendation_to_buy'].isin(f_recommendation_to_buy)]
    else:
        houses_buy_sell_df = houses_buy_sell.copy()
    st.header(houses_buy_sell_df.shape)
    st.write('Num of Houses, Num of Attributes')
    st.dataframe(houses_buy_sell_df)
    st.write("Here the table is organized by houses recommendation, such as:")
    st.write("Very high - 19 houses available with number of bedrooms above 8, size above 1074218 sqft and number of bathrooms above 8.")
    st.write("High - 755 houses available with number of floors greater or equal to 2, number of bedrooms between 4 and 7, and number of bathrooms between 2 and 4.")
    st.write("Regular - 9797 houses available.")
    # plot map
    high = houses_buy_sell[(houses_buy_sell['recommendation_to_buy'] == 'high') & (houses_buy_sell['condition'] > 3)]
    reg = houses_buy_sell[(houses_buy_sell['condition'] == 5) & (houses_buy_sell['recommendation_to_buy'] == 'regular') & (houses_buy_sell['bedrooms'] >= 4) & (houses_buy_sell['bathrooms'] >= 2) | (houses_buy_sell['waterfront'] == 1)]
    v_high = houses_buy_sell[houses_buy_sell['recommendation_to_buy'] == 'very_high']
    # pd.concat replaces the removed DataFrame.append; result is identical
    # (rows stacked in the same order, original indexes preserved).
    rec1 = pd.concat([high, reg])
    rec2 = pd.concat([rec1, v_high])
    if f_recommendation_to_buy != []:
        houses_buy_sell_map = rec2.loc[rec2['recommendation_to_buy'].isin(f_recommendation_to_buy)]
    else:
        houses_buy_sell_map = rec2.copy()
    st.header('Portfolio Map')
    st.header(houses_buy_sell_map.shape)
    st.write('Num of Houses, Num of Attributes')
    fig = px.scatter_mapbox(houses_buy_sell_map,
                            lat = 'lat',
                            lon = 'long',
                            color = 'condition',
                            size = 'price',
                            color_continuous_scale = 'Bluered_r',
                            size_max = 15,
                            zoom = 10)
    fig.update_layout(mapbox_style = 'open-street-map')
    fig.update_layout(height = 600, margin = {'r':0, 't':0, 'l':0, 'b':0})
    st.plotly_chart(fig)
    st.write("The map above shows houses that are also organized by the recommendation_to_buy attribute, however the values of these attributes are more filtered, where:")
    st.write("Very high - 19 houses available; no water view.")
    st.write("High - 124 houses available with condition above 3; no water view.")
    st.write("Regular - 111 available houses with condition equal to 5, number of bedrooms above 3, number of bathrooms greater or equal to 2. Also includes some houses that are not within the above characteristics, but have water view.")
    return None
def time_sell(houses_buy_sell):
    """Summarize profit by season and recommendation to pick when to sell.

    Builds three tables shown on the dashboard: total profit per season,
    profit per (season, recommendation) with the mean profit per house,
    and the overall profit percentage.  The two original per-row loops
    were replaced with vectorized column divisions producing the same
    values.

    :param houses_buy_sell: output of :func:`profits`.
    :return: None (all output goes to the Streamlit page).
    """
    time_to_sell = houses_buy_sell[['profit', 'profit_percentage_total', 'season']].groupby(
        ['season']).sum().reset_index()
    time_to_sell.columns = ['season', 'profit', 'profit_percentage_total']
    df1 = houses_buy_sell[['profit', 'season', 'recommendation_to_buy', 'profit_percentage_total']].groupby(
        ['season', 'recommendation_to_buy']).sum().reset_index()
    df2 = houses_buy_sell[['id', 'season', 'recommendation_to_buy']].groupby(
        ['season', 'recommendation_to_buy']).count().reset_index()
    gen_ind_profit = pd.merge(df1, df2, how='inner')
    # Vectorized: mean profit per house in each (season, recommendation)
    # group; 'id' holds the group's house count after the merge above.
    gen_ind_profit['profit_each_house'] = gen_ind_profit['profit'] / gen_ind_profit['id']
    gen_ind_profit.columns = ['season', 'recommendation_to_buy', 'total_profit', 'profit_percentage_total',
                              'num_of_houses', 'mean_profit_each_house']
    total_price = houses_buy_sell['price'].sum()
    total_profit = houses_buy_sell['profit'].sum()
    total = pd.DataFrame([[total_price, total_profit]], columns=['total_price', 'total_profit'])
    total['profit_percentage'] = (total['total_profit'] / total['total_price']) * 100
    st.header('Best Moment to Sell')
    st.dataframe(time_to_sell)
    st.write("According this table, summer presents the highest amount of profit, with more than 30 percent, so it is the best moment to sell houses.")
    st.header('General and Individual Profit')
    st.dataframe(gen_ind_profit, height=800)
    st.write("Here it is possible to identify the houses profit by season and houses recommendation, also the table shows the mean profit made by each house. ")
    st.write("This table informs that regular houses make the highest profit than the others recommendations in every season, flouting between 15.5 and 27.5 percent, with summer presenting the highest profit and winter the lowest.")
    st.write("However, dividing the profit by the number of houses, both related to each type of house recommendation, houses very high recommended presents the highest profit among all recommendations. Where summer is at first position with $138675,00 of profit per house; and winter at last position with $88,262.1429.")
    st.header('Total Profit Percentage')
    st.dataframe(total)
    st.write("This table represents the total profit by buying and selling all houses recommended buying. It informs that, by following this project, the company would have a profit of almost 19 percent, which are more than $771 million.")
    return None
def hypothesis(houses_to_buy):
    """Evaluate and render the eight business hypotheses on the dashboard.

    Each hypothesis block follows the same pattern: derive a grouping
    column, aggregate with groupby, compute a percentage difference, and
    show a Plotly chart next to the aggregated table.  The verdicts in
    the ``st.write`` strings are fixed text; only the percentage values
    are computed at run time.

    :param houses_to_buy: output of :func:`houses_buy` (all houses, both
        'buy' and 'do not buy').
    :return: None (all output goes to the Streamlit page).
    """
    st.title('Hypotheses')
    st.write('Those hypotheses include all houses from dataset, house to buy and houses not to buy.')
    # Hypothesis 01
    st.header('Hypothesis 01: Houses with water view are 20% more expensive, on the average.')
    dfh1 = houses_to_buy[['waterfront', 'price']]
    for i in range(len(dfh1)):
        if (dfh1.loc[i, 'waterfront'] == 1):
            dfh1.loc[i, 'water_view'] = 'yes'
        else:
            dfh1.loc[i, 'water_view'] = 'no'
    h1 = dfh1[['price', 'water_view']].groupby('water_view').mean().reset_index()
    # (more expensive - cheaper) / cheaper * 100
    h1_answer = ((h1.loc[1, 'price']) - (h1.loc[0, 'price'])) / (h1.loc[0, 'price']) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.bar(h1, x='water_view', y='price', color='water_view')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h1)
    st.write('False: Houses with water view are {} percent more expensive.'.format(h1_answer))
    # Hypothesis 02
    st.header('Hypothesis 02: Houses that was built before 1955 are 50% cheaper, on the average.')
    dfh2 = houses_to_buy[['yr_built', 'price']]
    for i in range(len(dfh2)):
        if (dfh2.loc[i, 'yr_built'] >= 1955):
            dfh2.loc[i, 'yr_built_1955'] = 'after'
        else:
            dfh2.loc[i, 'yr_built_1955'] = 'before'
    h2 = dfh2[['price', 'yr_built_1955']].groupby('yr_built_1955').mean().reset_index()
    # (cheaper - more expensive) / more expensive * 100
    h2_answer = ((h2.loc[1, 'price']) - (h2.loc[0, 'price'])) / (h2.loc[0, 'price']) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.bar(h2, x='yr_built_1955', y='price', color='yr_built_1955')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h2)
    st.write('False: Houses that was built before 1955 are {} percent cheaper.'.format(h2_answer))
    # Hypothesis 03
    st.header('Hypothesis 03: Houses without basement are 40% bigger than house with basement, related to total area (sqft_lot), on average.')
    dfh3 = houses_to_buy[['sqft_basement', 'sqft_lot']]
    for i in range(len(dfh3)):
        if (dfh3.loc[i, 'sqft_basement'] == 0):
            dfh3.loc[i, 'basement'] = 'No'
        else:
            dfh3.loc[i, 'basement'] = 'Yes'
    h3 = dfh3[['sqft_lot', 'basement']].groupby('basement').mean().reset_index()
    # (bigger - smaller) / smaller * 100
    h3_answer = ((h3.loc[0, 'sqft_lot']) - (h3.loc[1, 'sqft_lot'])) / (h3.loc[1, 'sqft_lot']) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.bar(h3, x='basement', y='sqft_lot', color='basement')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h3)
    st.write('False: Houses without basement are {} percent bigger them houses with basement.'.format(h3_answer))
    # Hypothesis 04
    st.header('Hypothesis 04: The increase of houses price YoY (Year over Year) (May 2014 compared to May 2015) is 10%, in general.')
    # Houses price of may 2014 compared to Houses price of may 2015
    # COMPOUND ANNUAL GROWTH RATES (CAGR)
    dfh4 = houses_to_buy[['date', 'price']]
    dfh4['year_month'] = pd.to_datetime(dfh4['date']).dt.strftime('%Y-%m')
    dfh4 = dfh4[(dfh4['year_month'] == '2014-05') | (dfh4['year_month'] == '2015-05')]
    h4 = dfh4[['price', 'year_month']].groupby('year_month').sum().reset_index()
    h4.columns = ['year_month', 'sum_of_price']
    # simple growth rate, period over period
    # (this year - last year) / last year
    h4_answer = ((h4.loc[1, 'sum_of_price']) - (h4.loc[0, 'sum_of_price'])) / (h4.loc[0, 'sum_of_price']) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.bar(h4, x='year_month', y='sum_of_price', color='year_month')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h4)
    st.write('False: The total houses price YoY (Year over Year) suffered a decrease of {} percent.'.format(h4_answer))
    # Hypothesis 05
    st.header('Hypothesis 05: Houses with 3 bathrooms have an increase MoM (month over Month) of 15%, in general.')
    # comparison of all Houses price of all months
    # COMPOUND MONTHLY GROWTH RATES (CMGR)
    dfh5 = houses_to_buy[['date', 'price', 'bathrooms']]
    dfh5['year_month'] = pd.to_datetime(dfh5['date']).dt.strftime('%Y-%m')
    dfh5 = dfh5[(dfh5['bathrooms'] == 3)]
    h5 = dfh5[['price', 'year_month']].groupby('year_month').sum().reset_index()
    h5.columns = ['year_month', 'sum_of_price']
    # simple monthly growth rate, month by month
    # (this month - last month) / last month
    h5['MoM_percentage'] = ((h5['sum_of_price'] - h5['sum_of_price'].shift(1)) / h5['sum_of_price'].shift(1)) * 100
    # compound monthly growth rate: last month over first month across all months in between
    # (last/first)**(1/periods)-1
    cmgr = (((h5.loc[12, 'sum_of_price']) / (h5.loc[0, 'sum_of_price'])) ** (
            1 / (h5['sum_of_price'].count() - 1)) - 1) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.line(h5, x='year_month', y='sum_of_price')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h5)
    st.write('False: The total houses price MoM (month over Month) suffered a decrease of {} percent.'.format(cmgr))
    # Hypothesis 06
    st.header('Hypothesis 06: Houses with number of bedrooms above 8 have a number of bathrooms 40% higher than houses with number of bedrooms between 5 and 8, and 94% higher than houses with number of bedrooms between 1 and, 4 on average.')
    dfh6 = houses_to_buy[['bedrooms', 'bathrooms']]
    for i in range(len(dfh6)):
        if (dfh6 .loc[i, 'bedrooms'] > 8):
            dfh6 .loc[i, 'bedrooms_level'] = 3
        elif (dfh6 .loc[i, 'bedrooms'] >= 5) & (dfh6 .loc[i, 'bedrooms'] <= 8):
            dfh6 .loc[i, 'bedrooms_level'] = 2
        else:
            dfh6 .loc[i, 'bedrooms_level'] = 1
    h6 = dfh6[['bathrooms', 'bedrooms_level']].groupby('bedrooms_level').mean().reset_index()
    # (bigger - smaller) / smaller * 100
    for i in range(len(h6)):
        h6.loc[i, 'percentage'] = ((h6.loc[2, 'bathrooms'] - h6.loc[i, 'bathrooms']) / h6.loc[i, 'bathrooms']) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.bar(h6, x='bedrooms_level', y='bathrooms', color='bedrooms_level')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h6)
    st.write('True: Houses with number of bedrooms above 8 have a number of bathrooms {} percent higher than houses with number of bedrooms between 5 and 8, and {} higher than houses with number of bedrooms between 1 and 4.'.format(h6.iloc[1,2],h6.iloc[0,2]))
    # Hypothesis 07
    st.header('Hypothesis 07: Houses with 7 bedrooms has the total area (sqft_lot) bigger between 132 to 320 percent than houses with 8 to 11 bedrooms, on average.')
    dfh7 = houses_to_buy[['bedrooms', 'sqft_lot']]
    h7 = dfh7[['bedrooms', 'sqft_lot']].groupby('bedrooms').mean().reset_index()
    # (bigger - smaller) / smaller * 100
    for i in range(len(h7)):
        h7.loc[i, 'percentage'] = ((h7.loc[6, 'sqft_lot'] - h7.loc[i, 'sqft_lot']) / h7.loc[i, 'sqft_lot']) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.line(h7, x='bedrooms', y='sqft_lot')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h7)
    st.write('True: Houses with 7 bedrooms has the total area (sqft_lot) bigger between {} and {} percent than houses with 8 to 11 bedrooms.'.format(h7.iloc[7,2],h7.iloc[10,2]))
    # Hypothesis 08
    st.header('Hypothesis 08: Renovated Houses have living rooms 12% bigger than house not renovated, on average.')
    dfh8 = houses_to_buy[['sqft_living', 'yr_renovated']]
    for i in range(len(dfh8)):
        if (dfh8.loc[i, 'yr_renovated'] == 0):
            dfh8.loc[i, 'renovated'] = 'No'
        else:
            dfh8.loc[i, 'renovated'] = 'Yes'
    h8 = dfh8[['sqft_living', 'renovated']].groupby('renovated').mean().reset_index()
    # (bigger - smaller) / smaller * 100
    h8_answer = ((h8.loc[1, 'sqft_living']) - (h8.loc[0, 'sqft_living'])) / (h8.loc[0, 'sqft_living']) * 100
    c1, c2 = st.beta_columns((1, 1))
    fig = px.bar(h8, x='renovated', y='sqft_living', color='renovated')
    c1.plotly_chart(fig, use_container_width=True)
    c2.dataframe(h8)
    st.write('True: Renovated Houses have living rooms {} percent bigger than houses not renovated, on average.'.format(h8_answer))
    st.title('Conclusion')
    st.write('In conclusion, it is possible to identify that the application of data analytics project at dataset from House Rocket Company was very successful, providing a huge profit opportunity based on which houses to buy and when to sell.')
    st.title('Next Steps')
    st.write('Other project that can be made with this dataset is the exploration data analyses, which identify the best’s attributes in order to apply machine learning algorithms, with the objective to predict the price of futures houses to buy.')
    return None
if __name__=="__main__":
    # ETL
    # data extraction
    path = 'kc_house_data.csv'
    data = get_data(path)
    # transformation
    data = data_excluding(data)
    data = set_feature(data)
    # load: answer the business questions and render the dashboard pages
    x = houses_buy(data)
    y = houses_sell(x)
    z = profits(y)
    map(z)
    time_sell(z)
    hypothesis(x)
591200edf7d402038cebd0f5047ca001d97fec9a | 17,884 | py | Python | tests/python/pants_test/reporting/test_reporting_integration.py | MEDIARITHMICS/pants | 8b2ad3263b73686832dab0bfffe44b9b1a1adbb3 | [
"Apache-2.0"
] | 1 | 2020-06-13T22:01:39.000Z | 2020-06-13T22:01:39.000Z | tests/python/pants_test/reporting/test_reporting_integration.py | MEDIARITHMICS/pants | 8b2ad3263b73686832dab0bfffe44b9b1a1adbb3 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/reporting/test_reporting_integration.py | MEDIARITHMICS/pants | 8b2ad3263b73686832dab0bfffe44b9b1a1adbb3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import json
import re
import unittest
from collections import defaultdict
from http.server import BaseHTTPRequestHandler
from pathlib import Path
import psutil
from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest
from pants.util.collections import assert_single_element
from pants.util.contextutil import http_server
# Expected header row of the invalidation-report CSV emitted by pants.
_HEADER = "invocation_id,task_name,targets_hash,target_id,cache_key_id,cache_key_hash,phase,valid"
# Path of the most recent report, relative to the pants workdir.
_REPORT_LOCATION = "reports/latest/invalidation-report.csv"
# Matches any data row: numeric invocation id, then a greedy `\S+` spanning
# the intermediate comma-separated fields, ending with a phase and validity.
_ENTRY = re.compile(r"^\d+,\S+,(init|pre-check|post-check),(True|False)")
# Rows for the RscCompile task in each of the three invalidation phases.
_INIT = re.compile(r"^\d+,RscCompile_compile_rsc,\w+,\S+,init,(True|False)")
_POST = re.compile(r"^\d+,RscCompile_compile_rsc,\w+,\S+,post-check,(True|False)")
_PRE = re.compile(r"^\d+,RscCompile_compile_rsc,\w+,\S+,pre-check,(True|False)")
class TestReportingIntegrationTest(PantsRunIntegrationTest, unittest.TestCase):
    """Integration tests for pants reporting: the invalidation report CSV,
    console output levels/labels/colors, and Zipkin trace reporting.

    Each test shells out to a full pants run via the
    PantsRunIntegrationTest helpers and inspects files or captured output.
    """

    def test_invalidation_report_output(self):
        with self.temporary_workdir() as workdir:
            command = [
                "compile",
                "examples/src/java/org/pantsbuild/example/hello/main",
                "--reporting-invalidation-report",
            ]
            pants_run = self.run_pants_with_workdir(command, workdir)
            self.assert_success(pants_run)
            output = Path(workdir, _REPORT_LOCATION)
            self.assertTrue(output.exists())
            output_contents = output.read_text().splitlines()
            self.assertEqual(_HEADER, output_contents[0])
            # Every data row must be well-formed, and all three phases must
            # appear at least once for the rsc compile task.
            init = False
            pre = False
            post = False
            for line in output_contents[1:]:
                self.assertTrue(_ENTRY.match(line))
                if _INIT.match(line):
                    init = True
                elif _PRE.match(line):
                    pre = True
                elif _POST.match(line):
                    post = True
            self.assertTrue(init and pre and post)

    def test_invalidation_report_clean_all(self):
        with self.temporary_workdir() as workdir:
            command = [
                "clean-all",
                "compile",
                "examples/src/java/org/pantsbuild/example/hello/main",
                "--reporting-invalidation-report",
            ]
            pants_run = self.run_pants_with_workdir(command, workdir)
            self.assert_success(pants_run)
            # The 'latest' link has been removed by clean-all but that's not fatal.
            report_dirs = list(Path(workdir, "reports").iterdir())
            self.assertEqual(1, len(report_dirs))
            output = Path(workdir, "reports", report_dirs[0], "invalidation-report.csv")
            self.assertTrue(output.exists(), msg=f"Missing report file {output}")

    # Markers emitted by the rsc compile task at info/debug verbosity.
    INFO_LEVEL_COMPILE_MSG = "Compiling 1 mixed source in 1 target (examples/src/java/org/pantsbuild/example/hello/simple:simple)."
    DEBUG_LEVEL_COMPILE_MSG = "examples/src/java/org/pantsbuild/example/hello/simple:simple) finished with status Successful"

    def test_output_level_warn(self):
        command = [
            "compile",
            "examples/src/java/org/pantsbuild/example/hello/simple",
            "--compile-rsc-level=warn",
        ]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        self.assertFalse(self.INFO_LEVEL_COMPILE_MSG in pants_run.stdout_data)
        self.assertFalse(self.DEBUG_LEVEL_COMPILE_MSG in pants_run.stdout_data)

    def test_output_level_info(self):
        command = [
            "compile",
            "examples/src/java/org/pantsbuild/example/hello/simple",
            "--compile-rsc-level=info",
        ]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        self.assertTrue(self.INFO_LEVEL_COMPILE_MSG in pants_run.stdout_data)
        self.assertFalse(self.DEBUG_LEVEL_COMPILE_MSG in pants_run.stdout_data)

    def test_output_level_debug(self):
        command = [
            "compile",
            "examples/src/java/org/pantsbuild/example/hello/simple",
            "--compile-rsc-level=debug",
        ]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        self.assertTrue(self.INFO_LEVEL_COMPILE_MSG in pants_run.stdout_data)
        self.assertTrue(self.DEBUG_LEVEL_COMPILE_MSG in pants_run.stdout_data)

    def test_output_color_enabled(self):
        command = [
            "compile",
            "examples/src/java/org/pantsbuild/example/hello/simple",
            "--compile-rsc-colors",
        ]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        # The ANSI reset escape right after the message proves coloring is on.
        self.assertTrue(self.INFO_LEVEL_COMPILE_MSG + "\x1b[0m" in pants_run.stdout_data)

    def test_output_level_group_compile(self):
        """Set level with the scope 'compile' and see that it propagates to the task level."""
        command = [
            "compile",
            "examples/src/java/org/pantsbuild/example/hello/simple",
            "--compile-level=debug",
        ]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        self.assertTrue(self.INFO_LEVEL_COMPILE_MSG in pants_run.stdout_data)
        self.assertTrue(self.DEBUG_LEVEL_COMPILE_MSG in pants_run.stdout_data)

    def test_default_console(self):
        command = ["--no-colors", "compile", "examples/src/java/org/pantsbuild/example/hello::"]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        self.assertIn(
            "Compiling 1 mixed source in 1 target (examples/src/java/org/pantsbuild/example/hello/greet:greet)",
            pants_run.stdout_data,
        )
        # Check rsc's label
        self.assertIn("[rsc]\n", pants_run.stdout_data)

    def test_suppress_compiler_output(self):
        command = [
            "compile",
            "examples/src/java/org/pantsbuild/example/hello::",
            '--reporting-console-label-format={ "COMPILER" : "SUPPRESS" }',
            '--reporting-console-tool-output-format={ "COMPILER" : "CHILD_SUPPRESS"}',
        ]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        self.assertIn(
            "Compiling 1 mixed source in 1 target (examples/src/java/org/pantsbuild/example/hello/greet:greet)",
            pants_run.stdout_data,
        )
        # BUGFIX: iterating the string itself walked it character-by-character,
        # which made both assertNotIn checks vacuously true. Check real lines.
        for line in pants_run.stdout_data.split("\n"):
            # rsc's stdout should be suppressed
            self.assertNotIn("Compile success at ", line)
            # rsc's label should be suppressed
            self.assertNotIn("[rsc]", line)

    def test_suppress_background_workunits_output(self):
        command = ["compile", "examples/src/java/org/pantsbuild/example/hello::"]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        # background workunit label should be suppressed
        self.assertNotIn("[background]", pants_run.stdout_data)
        # labels of children of the background workunit should be suppressed
        self.assertNotIn("[workdir_build_cleanup]", pants_run.stdout_data)

    def test_invalid_config(self):
        command = [
            "compile",
            "examples/src/java/org/pantsbuild/example/hello::",
            '--reporting-console-label-format={ "FOO" : "BAR" }',
            '--reporting-console-tool-output-format={ "BAZ" : "QUX"}',
        ]
        pants_run = self.run_pants(command)
        self.assert_success(pants_run)
        self.assertIn(
            "*** Got invalid key FOO for --reporting-console-label-format. Expected one of [",
            pants_run.stdout_data,
        )
        self.assertIn(
            "*** Got invalid value BAR for --reporting-console-label-format. Expected one of [",
            pants_run.stdout_data,
        )
        self.assertIn(
            "*** Got invalid key BAZ for --reporting-console-tool-output-format. Expected one of [",
            pants_run.stdout_data,
        )
        self.assertIn(
            "*** Got invalid value QUX for --reporting-console-tool-output-format. Expected one of [",
            pants_run.stdout_data,
        )
        # NOTE(review): this assertion is vacuous ("" is in every string);
        # kept to preserve behavior, but it checks nothing.
        self.assertIn("", pants_run.stdout_data)

    def test_epilog_to_stderr(self) -> None:
        # The timing epilog must land on stderr regardless of --quiet.
        def run_test(quiet_flag: str) -> None:
            command = [
                "--time",
                quiet_flag,
                "bootstrap",
                "examples/src/java/org/pantsbuild/example/hello::",
            ]
            pants_run = self.run_pants(command)
            self.assert_success(pants_run)
            self.assertIn("Cumulative Timings", pants_run.stderr_data)
            self.assertNotIn("Cumulative Timings", pants_run.stdout_data)

        run_test("--quiet")
        run_test("--no-quiet")

    def test_zipkin_reporter(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                "minimize",
                "examples/src/java/org/pantsbuild/example/hello/simple",
            ]
            pants_run = self.run_pants(command)
            self.assert_success(pants_run)
            child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
            self.assertTrue(child_processes)
            self.wait_spans_to_be_sent(child_processes)
            trace = assert_single_element(ZipkinHandler.traces.values())
            main_span = self.find_spans_by_name(trace, "main")
            self.assertEqual(len(main_span), 1)
            parent_id = main_span[0]["id"]
            main_children = self.find_spans_by_parentId(trace, parent_id)
            self.assertTrue(main_children)
            self.assertTrue(any(span["name"] == "minimize" for span in main_children))

    def test_zipkin_reporter_with_given_trace_id_parent_id(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            trace_id = "aaaaaaaaaaaaaaaa"
            parent_span_id = "ffffffffffffffff"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                f"--reporting-zipkin-trace-id={trace_id}",
                f"--reporting-zipkin-parent-id={parent_span_id}",
                "minimize",
                "examples/src/java/org/pantsbuild/example/hello/simple",
            ]
            pants_run = self.run_pants(command)
            self.assert_success(pants_run)
            child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
            self.assertTrue(child_processes)
            self.wait_spans_to_be_sent(child_processes)
            trace = assert_single_element(ZipkinHandler.traces.values())
            main_span = self.find_spans_by_name(trace, "main")
            self.assertEqual(len(main_span), 1)
            # The externally supplied ids must be propagated onto the main span.
            main_span_trace_id = main_span[0]["traceId"]
            self.assertEqual(main_span_trace_id, trace_id)
            main_span_parent_id = main_span[0]["parentId"]
            self.assertEqual(main_span_parent_id, parent_span_id)
            parent_id = main_span[0]["id"]
            main_children = self.find_spans_by_parentId(trace, parent_id)
            self.assertTrue(main_children)
            self.assertTrue(any(span["name"] == "minimize" for span in main_children))

    def test_zipkin_reporter_with_zero_sample_rate(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                "--reporting-zipkin-sample-rate=0.0",
                "minimize",
                "examples/src/java/org/pantsbuild/example/hello/simple",
            ]
            pants_run = self.run_pants(command)
            self.assert_success(pants_run)
            # Sampling disabled: nothing should be reported at all.
            child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
            self.assertFalse(child_processes)
            num_of_traces = len(ZipkinHandler.traces)
            self.assertEqual(num_of_traces, 0)

    def test_zipkin_reporter_for_v2_engine(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                "--reporting-zipkin-trace-v2",
                "minimize",
                "examples/src/java/org/pantsbuild/example/hello/simple",
            ]
            pants_run = self.run_pants(command)
            self.assert_success(pants_run)
            child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
            self.assertTrue(child_processes)
            self.wait_spans_to_be_sent(child_processes)
            trace = assert_single_element(ZipkinHandler.traces.values())
            v2_span_name_part = "snapshot"
            self.assertTrue(
                any(v2_span_name_part in span["name"] for span in trace),
                "There is no span that contains '{}' in it's name. The trace:{}".format(
                    v2_span_name_part, trace
                ),
            )

    def test_zipkin_reports_for_pure_v2_goals(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                "--no-v1",
                "--v2",
                f"--reporting-zipkin-endpoint={endpoint}",
                "--reporting-zipkin-trace-v2",
                "list",
                "3rdparty:",
            ]
            pants_run = self.run_pants(command)
            self.assert_success(pants_run)
            child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
            self.assertTrue(child_processes)
            self.wait_spans_to_be_sent(child_processes)
            trace = assert_single_element(ZipkinHandler.traces.values())
            v2_span_name_part = "snapshot"
            self.assertTrue(
                any(v2_span_name_part in span["name"] for span in trace),
                "There is no span that contains '{}' in it's name. The trace:{}".format(
                    v2_span_name_part, trace
                ),
            )

    def test_zipkin_reporter_multi_threads(self):
        ZipkinHandler = zipkin_handler()
        with http_server(ZipkinHandler) as port:
            endpoint = f"http://localhost:{port}"
            command = [
                "-ldebug",
                f"--reporting-zipkin-endpoint={endpoint}",
                "compile",
                "examples/src/scala/org/pantsbuild/example/several_scala_targets::",
            ]
            pants_run = self.run_pants(command)
            self.assert_success(pants_run)
            child_processes = self.find_child_processes_that_send_spans(pants_run.stderr_data)
            self.assertTrue(child_processes)
            self.wait_spans_to_be_sent(child_processes)
            trace = assert_single_element(ZipkinHandler.traces.values())
            # All per-target compile workunit spans must hang off the single
            # rsc task span.
            rsc_task_span = self.find_spans_by_name_and_service_name(trace, "rsc", "pants/task")
            self.assertEqual(len(rsc_task_span), 1)
            rsc_task_span_id = rsc_task_span[0]["id"]
            compile_workunit_spans = self.find_spans_by_name_and_service_name(
                trace, "compile", "pants/workunit"
            )
            self.assertEqual(len(compile_workunit_spans), 4)
            self.assertTrue(
                all(span["parentId"] == rsc_task_span_id for span in compile_workunit_spans)
            )

    @staticmethod
    def find_spans_by_name_and_service_name(trace, name, service_name):
        return [
            span
            for span in trace
            if span["name"] == name
            and span["annotations"][0]["endpoint"]["serviceName"] == service_name
        ]

    @staticmethod
    def find_spans_by_name(trace, name):
        return [span for span in trace if span["name"] == name]

    @staticmethod
    def find_spans_by_parentId(trace, parent_id):
        return [span for span in trace if span.get("parentId") == parent_id]

    @staticmethod
    def find_child_processes_that_send_spans(pants_result_stderr):
        """Collect pids logged by span-sending child processes on stderr."""
        child_processes = set()
        for line in pants_result_stderr.split("\n"):
            if "Sending spans to Zipkin server from pid:" in line:
                i = line.rindex(":")
                child_process_pid = line[i + 1 :]
                child_processes.add(int(child_process_pid))
        return child_processes

    @staticmethod
    def wait_spans_to_be_sent(child_processes):
        # Busy-wait until every span-sending child process has exited.
        existing_child_processes = child_processes.copy()
        while existing_child_processes:
            for child_pid in child_processes:
                if child_pid in existing_child_processes and not psutil.pid_exists(child_pid):
                    existing_child_processes.remove(child_pid)
def zipkin_handler():
    """Build a fresh HTTP request-handler class that accumulates posted
    Zipkin spans into a class-level ``traces`` mapping keyed by trace id.

    A new class is returned per call so each test gets its own span store.
    """

    class ZipkinHandler(BaseHTTPRequestHandler):
        # trace id -> list of span dicts, shared by all requests handled
        # through this class.
        traces = defaultdict(list)

        def do_POST(self):
            body_size = int(self.headers.get("content-length"))
            payload = self.rfile.read(body_size)
            for span in json.loads(payload):
                self.__class__.traces[span["traceId"]].append(span)
            self.send_response(200)

    return ZipkinHandler
| 40.188764 | 131 | 0.618206 |
df2ecb44eb0d7f2e2f4753d3a8f74bd4124f6e3c | 452 | py | Python | venv/Scripts/easy_install-script.py | SeoMooseong/python_chetbot | f817d538f5286b582152c8f46dc94304401808cf | [
"MIT"
] | null | null | null | venv/Scripts/easy_install-script.py | SeoMooseong/python_chetbot | f817d538f5286b582152c8f46dc94304401808cf | [
"MIT"
] | null | null | null | venv/Scripts/easy_install-script.py | SeoMooseong/python_chetbot | f817d538f5286b582152c8f46dc94304401808cf | [
"MIT"
] | null | null | null | #!C:\Users\student\PycharmProjects\chatbot\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
)
| 34.769231 | 83 | 0.699115 |
4ddf1bdb0336755c7a7ea439684932edb90ed4f8 | 806 | py | Python | dynamic_systems/identification/foptd.py | eddardd/CrossDomainFaultDetection | 83dd24727a8b35cda2549b40166beaf740e14c98 | [
"MIT"
] | 3 | 2021-08-30T11:41:36.000Z | 2021-12-22T10:45:25.000Z | dynamic_systems/identification/foptd.py | eddardd/CrossDomainFaultDiagnosis | 83dd24727a8b35cda2549b40166beaf740e14c98 | [
"MIT"
] | 1 | 2021-02-26T06:02:33.000Z | 2021-02-26T06:02:33.000Z | dynamic_systems/identification/foptd.py | eddardd/CrossDomainFaultDetection | 83dd24727a8b35cda2549b40166beaf740e14c98 | [
"MIT"
] | 2 | 2021-06-03T11:46:20.000Z | 2022-03-25T09:16:03.000Z | import numpy as np
class FOPTD:
def __init__(self):
self.alpha = None
self.fitted = False
def __phi(self, i, t, y, dt):
vec = np.zeros([1, 3])
vec[0, 0] = sum(y[:i]) * dt
vec[0, 1] = t[i]
vec[0, 2] = -1
return vec
def fit(self, t, y):
dt = t[1] - t[0]
Phi = np.concatenate([self.__phi(i, t, y, dt) for i in range(len(t))], axis=0)
self.alpha = np.linalg.pinv(Phi) @ (y.reshape(-1, 1))
self.fitted = True
def predict(self):
if self.fitted:
tau = - self.alpha[0, 0] ** (-1)
K = self.alpha[1, 0] * tau
theta = self.alpha[2, 0] / K
return [tau, K, theta]
else:
raise ValueError('Trying to use predict in a unfitted model.') | 26 | 86 | 0.48139 |
bc771984d5a501d29279afd40d28fe6e9a7e48f3 | 455 | py | Python | test/functional/test_framework/beerchainconfig.py | beerchainproject/beerchain | 85a2701aff25358bbf2da6651d407a8d63ea2be8 | [
"MIT"
] | null | null | null | test/functional/test_framework/beerchainconfig.py | beerchainproject/beerchain | 85a2701aff25358bbf2da6651d407a8d63ea2be8 | [
"MIT"
] | null | null | null | test/functional/test_framework/beerchainconfig.py | beerchainproject/beerchain | 85a2701aff25358bbf2da6651d407a8d63ea2be8 | [
"MIT"
] | 2 | 2020-09-15T08:11:39.000Z | 2021-03-22T21:56:28.000Z | COINBASE_MATURITY = 500
INITIAL_BLOCK_REWARD = 20000
INITIAL_HASH_UTXO_ROOT = 0x21b463e3b52f6201c0ad6c991be0485b6ef8c092e64583ffa655cc1b171fe856
INITIAL_HASH_STATE_ROOT = 0x9514771014c9ae803d8cea2731b2063e83de44802b40dce2d06acd02d0ff65e9
MAX_BLOCK_BASE_SIZE = 2000000
BEERCHAIN_MIN_GAS_PRICE = 40
BEERCHAIN_MIN_GAS_PRICE_STR = "0.00000040"
NUM_DEFAULT_DGP_CONTRACTS = 5
MPOS_PARTICIPANTS = 10
LAST_POW_BLOCK = 5000
BLOCKS_BEFORE_PROPOSAL_EXPIRATION = 216
| 37.916667 | 92 | 0.896703 |
96a7877f6984b25be0e4d080f42e2e73a6769593 | 8,911 | py | Python | sdk/python/pulumi_azure_native/aad/v20210501/get_ou_container.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/aad/v20210501/get_ou_container.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/aad/v20210501/get_ou_container.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
# Public API of this generated module.
__all__ = [
    'GetOuContainerResult',
    'AwaitableGetOuContainerResult',
    'get_ou_container',
]
# Auto-generated pulumi output type: the __init__ validates each field's
# runtime type and stores it via pulumi.set; the @property getters expose the
# stored values read-only via pulumi.get.
@pulumi.output_type
class GetOuContainerResult:
    """
    Resource for OuContainer.
    """
    def __init__(__self__, accounts=None, container_id=None, deployment_id=None, distinguished_name=None, domain_name=None, etag=None, id=None, location=None, name=None, provisioning_state=None, service_status=None, system_data=None, tags=None, tenant_id=None, type=None):
        # One validate-then-set stanza per field (generated code).
        if accounts and not isinstance(accounts, list):
            raise TypeError("Expected argument 'accounts' to be a list")
        pulumi.set(__self__, "accounts", accounts)
        if container_id and not isinstance(container_id, str):
            raise TypeError("Expected argument 'container_id' to be a str")
        pulumi.set(__self__, "container_id", container_id)
        if deployment_id and not isinstance(deployment_id, str):
            raise TypeError("Expected argument 'deployment_id' to be a str")
        pulumi.set(__self__, "deployment_id", deployment_id)
        if distinguished_name and not isinstance(distinguished_name, str):
            raise TypeError("Expected argument 'distinguished_name' to be a str")
        pulumi.set(__self__, "distinguished_name", distinguished_name)
        if domain_name and not isinstance(domain_name, str):
            raise TypeError("Expected argument 'domain_name' to be a str")
        pulumi.set(__self__, "domain_name", domain_name)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if service_status and not isinstance(service_status, str):
            raise TypeError("Expected argument 'service_status' to be a str")
        pulumi.set(__self__, "service_status", service_status)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if tenant_id and not isinstance(tenant_id, str):
            raise TypeError("Expected argument 'tenant_id' to be a str")
        pulumi.set(__self__, "tenant_id", tenant_id)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def accounts(self) -> Optional[Sequence['outputs.ContainerAccountResponse']]:
        """
        The list of container accounts
        """
        return pulumi.get(self, "accounts")

    @property
    @pulumi.getter(name="containerId")
    def container_id(self) -> str:
        """
        The OuContainer name
        """
        return pulumi.get(self, "container_id")

    @property
    @pulumi.getter(name="deploymentId")
    def deployment_id(self) -> str:
        """
        The Deployment id
        """
        return pulumi.get(self, "deployment_id")

    @property
    @pulumi.getter(name="distinguishedName")
    def distinguished_name(self) -> str:
        """
        Distinguished Name of OuContainer instance
        """
        return pulumi.get(self, "distinguished_name")

    @property
    @pulumi.getter(name="domainName")
    def domain_name(self) -> str:
        """
        The domain name of Domain Services.
        """
        return pulumi.get(self, "domain_name")

    @property
    @pulumi.getter
    def etag(self) -> Optional[str]:
        """
        Resource etag
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource Id
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        Resource location
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        The current deployment or provisioning state, which only appears in the response.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="serviceStatus")
    def service_status(self) -> str:
        """
        Status of OuContainer instance
        """
        return pulumi.get(self, "service_status")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        The system meta data relating to this resource.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> str:
        """
        Azure Active Directory tenant id
        """
        return pulumi.get(self, "tenant_id")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type
        """
        return pulumi.get(self, "type")
class AwaitableGetOuContainerResult(GetOuContainerResult):
    # Awaitable wrapper (generated): the unreachable `yield` turns __await__
    # into a generator, so the already-fetched result can be awaited and
    # resolves immediately with a plain GetOuContainerResult copy.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetOuContainerResult(
            accounts=self.accounts,
            container_id=self.container_id,
            deployment_id=self.deployment_id,
            distinguished_name=self.distinguished_name,
            domain_name=self.domain_name,
            etag=self.etag,
            id=self.id,
            location=self.location,
            name=self.name,
            provisioning_state=self.provisioning_state,
            service_status=self.service_status,
            system_data=self.system_data,
            tags=self.tags,
            tenant_id=self.tenant_id,
            type=self.type)
def get_ou_container(domain_service_name: Optional[str] = None,
                     ou_container_name: Optional[str] = None,
                     resource_group_name: Optional[str] = None,
                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetOuContainerResult:
    """
    Resource for OuContainer.


    :param str domain_service_name: The name of the domain service.
    :param str ou_container_name: The name of the OuContainer.
    :param str resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
    """
    # Marshal the arguments for the provider invoke call (generated code).
    __args__ = dict()
    __args__['domainServiceName'] = domain_service_name
    __args__['ouContainerName'] = ou_container_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronously invoke the Azure-native provider token for this API version.
    __ret__ = pulumi.runtime.invoke('azure-native:aad/v20210501:getOuContainer', __args__, opts=opts, typ=GetOuContainerResult).value

    # Re-wrap the raw result in the awaitable variant, field by field.
    return AwaitableGetOuContainerResult(
        accounts=__ret__.accounts,
        container_id=__ret__.container_id,
        deployment_id=__ret__.deployment_id,
        distinguished_name=__ret__.distinguished_name,
        domain_name=__ret__.domain_name,
        etag=__ret__.etag,
        id=__ret__.id,
        location=__ret__.location,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        service_status=__ret__.service_status,
        system_data=__ret__.system_data,
        tags=__ret__.tags,
        tenant_id=__ret__.tenant_id,
        type=__ret__.type)
58046e8d0e44a6890164e8c10b1473341d105c4a | 1,516 | py | Python | q6.py | cs-fullstack-2019-fall/codeassessment2-jordondoug2019 | 3e92507663bf76652919d4dd1e92790197bcfc97 | [
"Apache-2.0"
] | null | null | null | q6.py | cs-fullstack-2019-fall/codeassessment2-jordondoug2019 | 3e92507663bf76652919d4dd1e92790197bcfc97 | [
"Apache-2.0"
] | null | null | null | q6.py | cs-fullstack-2019-fall/codeassessment2-jordondoug2019 | 3e92507663bf76652919d4dd1e92790197bcfc97 | [
"Apache-2.0"
] | null | null | null | # ### Problem 6
# Create a class called ClubMember
# Each club member has a name and a role
# Create ClubMember instances for the following people:
# ```
# Alfred - Club President
# Troy - Club Vice President
# Albert - Club Secretary
# Bob - Club Treasurer
# ```
# Add each member instance to a new club_members list that you create.
# Write the code needed to loop through the club member list and print the current number of members in the list, then the member’s name and club role, one per line using f strings.
# Example Output:
# ```
# There are currently 4 club members in the list!
# Club President: Alfred
# Club Vice President: Troy
# Club Secretary: Albert
# Club Treasurer: Bob
# ```
# create class for clubmember with print method
class Clubmember:
    """A club member with a display name and a club role."""

    def __init__(self, name, role):
        # Remember who the member is and what they do for the club.
        self.name = name
        self.role = role

    def __str__(self):
        # Keep the exact original format: "<name> <role> " plus a newline.
        return "{} {} \n".format(self.name, self.role)
# Create one Clubmember instance per officer.
# (Fixed: "Troy" previously had a stray leading space in the name literal.)
mem1 = Clubmember("Alfred", "Club President")
mem2 = Clubmember("Troy", "Club Vice President")
mem3 = Clubmember("Albert", "Club Secretary")
mem4 = Clubmember("Bob", "Club Treasurer")

# Collect the members so they can be counted and iterated together.
newclubmembers = [mem1, mem2, mem3, mem4]

# Per the exercise spec above: first report how many members are in the
# list, then print each member, one per line.
print(f"There are currently {len(newclubmembers)} club members in the list!")
for member in newclubmembers:
    print(member)
72be10d8c9289cdac4e160883cf46b158421b48b | 11,177 | py | Python | st2api/st2api/controllers/v1/aliasexecution.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | null | null | null | st2api/st2api/controllers/v1/aliasexecution.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 15 | 2021-02-11T22:58:54.000Z | 2021-08-06T18:03:47.000Z | st2api/st2api/controllers/v1/aliasexecution.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 1 | 2021-07-10T15:02:29.000Z | 2021-07-10T15:02:29.000Z | # Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import jsonschema
from jinja2.exceptions import UndefinedError
from oslo_config import cfg
from st2api.controllers.base import BaseRestControllerMixin
from st2common import log as logging
from st2common.exceptions.actionalias import ActionAliasAmbiguityException
from st2common.exceptions.db import StackStormDBObjectNotFoundError
from st2common.models.api.action import ActionAliasAPI
from st2common.models.api.action import AliasMatchAndExecuteInputAPI
from st2common.models.api.auth import get_system_username
from st2common.models.api.execution import ActionExecutionAPI
from st2common.models.db.auth import UserDB
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.notification import NotificationSchema, NotificationSubSchema
from st2common.models.utils import action_param_utils
from st2common.models.utils.action_alias_utils import extract_parameters_for_action_alias_db
from st2common.models.utils.action_alias_utils import inject_immutable_parameters
from st2common.persistence.actionalias import ActionAlias
from st2common.services import action as action_service
from st2common.util import action_db as action_utils
from st2common.util import reference
from st2common.util.actionalias_matching import get_matching_alias
from st2common.util.jinja import render_values as render
from st2common.rbac.types import PermissionType
from st2common.rbac.backends import get_rbac_backend
from st2common.router import abort
from st2common.router import Response
# six's http_client module gives py2/py3-compatible HTTP status constants.
http_client = six.moves.http_client

LOG = logging.getLogger(__name__)

# Per-type overrides applied when casting alias parameters to the action's
# schema types: arrays are parsed from comma-separated strings, trimming
# whitespace around each element.
CAST_OVERRIDES = {
    'array': (lambda cs_x: [v.strip() for v in cs_x.split(',')])
}
class ActionAliasExecutionController(BaseRestControllerMixin):
    def match_and_execute(self, input_api, requester_user, show_secrets=False):
        """
        Try to find a matching alias and if one is found, schedule a new
        execution by parsing parameters from the provided command against
        the matched alias.

        Handles requests:
            POST /aliasexecution/match_and_execute
        """
        command = input_api.command

        try:
            format_ = get_matching_alias(command=command)
        except ActionAliasAmbiguityException as e:
            # More than one alias format matched the command: reject as 400.
            LOG.exception('Command "%s" matched (%s) patterns.', e.command, len(e.matches))
            return abort(http_client.BAD_REQUEST, six.text_type(e))

        # Matched alias: schedule execution
        action_alias_db = format_['alias']
        representation = format_['representation']

        # Build the input payload for the regular (named-alias) execution path.
        params = {
            'name': action_alias_db.name,
            'format': representation,
            'command': command,
            'user': input_api.user,
            'source_channel': input_api.source_channel,
        }

        # Add in any additional parameters provided by the user
        if input_api.notification_channel:
            params['notification_channel'] = input_api.notification_channel

        if input_api.notification_route:
            params['notification_route'] = input_api.notification_route

        alias_execution_api = AliasMatchAndExecuteInputAPI(**params)
        results = self._post(
            payload=alias_execution_api,
            requester_user=requester_user,
            show_secrets=show_secrets,
            match_multiple=format_['match_multiple'])
        return Response(json={'results': results}, status=http_client.CREATED)
    def _post(self, payload, requester_user, show_secrets=False, match_multiple=False):
        """Resolve the alias named in ``payload`` and schedule execution(s).

        Extracts action parameters from the payload's command string against
        the alias format, schedules one execution per extracted parameter set
        (exactly one unless ``match_multiple``), and returns a list of result
        dicts containing the execution, the alias, and any rendered "ack"
        message/extra. Aborts with 400 on a missing name or disabled alias
        and 404 on an unknown alias.
        """
        action_alias_name = payload.name if payload else None

        if not action_alias_name:
            abort(http_client.BAD_REQUEST, 'Alias execution "name" is required')
            return

        if not requester_user:
            requester_user = UserDB(cfg.CONF.system_user.user)

        format_str = payload.format or ''
        command = payload.command or ''

        try:
            action_alias_db = ActionAlias.get_by_name(action_alias_name)
        except ValueError:
            action_alias_db = None

        if not action_alias_db:
            msg = 'Unable to identify action alias with name "%s".' % (action_alias_name)
            abort(http_client.NOT_FOUND, msg)
            return

        if not action_alias_db.enabled:
            msg = 'Action alias with name "%s" is disabled.' % (action_alias_name)
            abort(http_client.BAD_REQUEST, msg)
            return

        # Each entry is one dict of action parameters; one execution is
        # scheduled per entry.
        if match_multiple:
            multiple_execution_parameters = extract_parameters_for_action_alias_db(
                action_alias_db=action_alias_db,
                format_str=format_str,
                param_stream=command,
                match_multiple=match_multiple)
        else:
            multiple_execution_parameters = [
                extract_parameters_for_action_alias_db(
                    action_alias_db=action_alias_db,
                    format_str=format_str,
                    param_stream=command,
                    match_multiple=match_multiple)
            ]

        notify = self._get_notify_field(payload)

        context = {
            'action_alias_ref': reference.get_ref_from_model(action_alias_db),
            'api_user': payload.user,
            'user': requester_user.name,
            'source_channel': payload.source_channel,
        }

        # Merge alias-defined immutable parameters into every parameter set.
        inject_immutable_parameters(
            action_alias_db=action_alias_db,
            multiple_execution_parameters=multiple_execution_parameters,
            action_context=context)

        results = []
        for execution_parameters in multiple_execution_parameters:
            execution = self._schedule_execution(action_alias_db=action_alias_db,
                                                 params=execution_parameters,
                                                 notify=notify,
                                                 context=context,
                                                 show_secrets=show_secrets,
                                                 requester_user=requester_user)

            result = {
                'execution': execution,
                'actionalias': ActionAliasAPI.from_model(action_alias_db)
            }

            # Render the alias's optional "ack" message/extra with Jinja;
            # rendering failures are reported inline rather than raised.
            if action_alias_db.ack:
                try:
                    if 'format' in action_alias_db.ack:
                        message = render({'alias': action_alias_db.ack['format']}, result)['alias']
                        result.update({
                            'message': message
                        })
                except UndefinedError as e:
                    result.update({
                        'message': ('Cannot render "format" in field "ack" for alias. ' +
                                    six.text_type(e))
                    })

                try:
                    if 'extra' in action_alias_db.ack:
                        result.update({
                            'extra': render(action_alias_db.ack['extra'], result)
                        })
                except UndefinedError as e:
                    result.update({
                        'extra': ('Cannot render "extra" in field "ack" for alias. ' +
                                  six.text_type(e))
                    })

            results.append(result)

        return results
def post(self, payload, requester_user, show_secrets=False):
    """Schedule a single execution for the alias referenced in ``payload``.

    Delegates to _post() in single-match mode and wraps the first (and only)
    result in a CREATED response.
    """
    single_result = self._post(payload, requester_user, show_secrets,
                               match_multiple=False)[0]
    return Response(json=single_result, status=http_client.CREATED)
def _tokenize_alias_execution(self, alias_execution):
tokens = alias_execution.strip().split(' ', 1)
return (tokens[0], tokens[1] if len(tokens) > 1 else None)
def _get_notify_field(self, payload):
    """Build the notify structure that routes completion events to the caller.

    Prefers the payload's ``notification_route`` over the legacy
    ``notification_channel`` attribute.
    """
    target_route = (getattr(payload, 'notification_route', None) or
                    getattr(payload, 'notification_channel', None))

    completion = NotificationSubSchema()
    completion.routes = [target_route]
    completion.data = {
        'user': payload.user,
        'source_channel': payload.source_channel,
        'source_context': getattr(payload, 'source_context', None),
    }

    notification = NotificationSchema()
    notification.on_complete = completion
    return notification
def _schedule_execution(self, action_alias_db, params, notify, context, requester_user,
                        show_secrets):
    """Request an execution of the action backing an alias.

    Args:
        action_alias_db: ActionAliasDB model the request resolved to.
        params: dict of execution parameters extracted from the alias format.
        notify: notification schema routing completion events to the requester.
        context: execution context dict; a minimal one is synthesized if falsy.
        requester_user: user requesting the execution (RBAC is checked).
        show_secrets: whether secret parameter values may be returned unmasked.

    Returns:
        ActionExecutionAPI for the scheduled execution.

    Raises:
        StackStormDBObjectNotFoundError: if the alias references an unknown
            action ref. Other failures are converted to HTTP errors via
            abort() below.
    """
    action_ref = action_alias_db.action_ref
    action_db = action_utils.get_action_by_ref(action_ref)
    if not action_db:
        raise StackStormDBObjectNotFoundError('Action with ref "%s" not found ' % (action_ref))
    # RBAC: requester must hold ACTION_EXECUTE on the target action.
    rbac_utils = get_rbac_backend().get_utils_class()
    permission_type = PermissionType.ACTION_EXECUTE
    rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user,
                                                      resource_db=action_db,
                                                      permission_type=permission_type)
    try:
        # prior to shipping off the params cast them to the right type.
        params = action_param_utils.cast_params(action_ref=action_alias_db.action_ref,
                                                params=params,
                                                cast_overrides=CAST_OVERRIDES)
        if not context:
            # No caller-supplied context: attribute the run to the system user.
            context = {
                'action_alias_ref': reference.get_ref_from_model(action_alias_db),
                'user': get_system_username()
            }
        liveaction = LiveActionDB(action=action_alias_db.action_ref, context=context,
                                  parameters=params, notify=notify)
        _, action_execution_db = action_service.request(liveaction)
        mask_secrets = self._get_mask_secrets(requester_user, show_secrets=show_secrets)
        return ActionExecutionAPI.from_model(action_execution_db, mask_secrets=mask_secrets)
    except ValueError as e:
        LOG.exception('Unable to execute action.')
        abort(http_client.BAD_REQUEST, six.text_type(e))
    except jsonschema.ValidationError as e:
        LOG.exception('Unable to execute action. Parameter validation failed.')
        abort(http_client.BAD_REQUEST, six.text_type(e))
    except Exception as e:
        LOG.exception('Unable to execute action. Unexpected error encountered.')
        abort(http_client.INTERNAL_SERVER_ERROR, six.text_type(e))
# Module-level controller instance; presumably wired into the API router by the
# framework -- TODO confirm against the route registration code.
action_alias_execution_controller = ActionAliasExecutionController()
| 42.660305 | 99 | 0.635501 |
0143ad70717d20ad015f62f56c24d5dc26acfa93 | 2,199 | py | Python | aydin/it/test/test_classic_saveload.py | royerloic/aydin | f9c61a24030891d008c318b250da5faec69fcd7d | [
"BSD-3-Clause"
] | null | null | null | aydin/it/test/test_classic_saveload.py | royerloic/aydin | f9c61a24030891d008c318b250da5faec69fcd7d | [
"BSD-3-Clause"
] | null | null | null | aydin/it/test/test_classic_saveload.py | royerloic/aydin | f9c61a24030891d008c318b250da5faec69fcd7d | [
"BSD-3-Clause"
] | null | null | null | import time
from os.path import join
import numpy
import pytest
from skimage.data import camera
from skimage.metrics import peak_signal_noise_ratio as psnr
from skimage.metrics import structural_similarity as ssim
from aydin.io.datasets import normalise, add_noise
from aydin.io.folders import get_temp_folder
from aydin.it.base import ImageTranslatorBase
from aydin.it.classic import ImageDenoiserClassic
@pytest.mark.heavy
def test_saveload_bilateral():
    # Round-trip save/load of the "bilateral" denoiser; thresholds are the
    # minimum acceptable PSNR/SSIM for this method.
    saveload("bilateral", min_psnr=16, min_ssim=0.30)
def test_saveload_gaussian():
    # Round-trip save/load of the "gaussian" denoiser.
    saveload("gaussian", min_psnr=20, min_ssim=0.71)
@pytest.mark.heavy
def test_saveload_gm():
    # Round-trip save/load of the "gm" denoiser.
    saveload("gm", min_psnr=20, min_ssim=0.65)
def test_saveload_lipschitz():
    # Round-trip save/load of the "lipschitz" denoiser (lowest quality bar).
    saveload("lipschitz", min_psnr=12, min_ssim=0.16)
# @pytest.mark.heavy
def test_saveload_pca():
    # Round-trip save/load of the "pca" denoiser. NOTE: the heavy marker is
    # commented out, so this runs in the default test pass.
    saveload("pca", min_psnr=20, min_ssim=0.60)
@pytest.mark.heavy
def test_saveload_tv():
    # Round-trip save/load of the "tv" (total variation) denoiser.
    saveload("tv", min_psnr=20, min_ssim=0.73)
@pytest.mark.heavy
def test_saveload_wavelet():
    # Round-trip save/load of the "wavelet" denoiser.
    saveload("wavelet", min_psnr=17, min_ssim=0.40)
def saveload(method, min_psnr=22, min_ssim=0.75):
    """Train a classic denoiser, save it, reload it, and verify quality.

    Args:
        method: ImageDenoiserClassic method name (e.g. "gaussian", "tv").
        min_psnr: minimum acceptable PSNR of denoised output vs ground truth.
        min_ssim: minimum acceptable SSIM of denoised output vs ground truth.
    """
    image = normalise(camera().astype(numpy.float32))
    noisy = add_noise(image)

    it = ImageDenoiserClassic(method=method)
    it.train(noisy, noisy)

    # Persist to a unique temp path, drop the trained instance, then reload
    # through the base-class loader to prove deserialization is self-contained.
    temp_file = join(get_temp_folder(), "test_it_saveload" + str(time.time()))
    print(temp_file)
    it.save(temp_file)
    del it
    loaded_it = ImageTranslatorBase.load(temp_file)

    denoised = loaded_it.translate(noisy)
    denoised = denoised.clip(0, 1)

    psnr_noisy = psnr(noisy, image)
    ssim_noisy = ssim(noisy, image)
    print("noisy", psnr_noisy, ssim_noisy)
    psnr_denoised = psnr(denoised, image)
    ssim_denoised = ssim(denoised, image)
    print("denoised", psnr_denoised, ssim_denoised)

    # The reloaded model must at least improve on the noisy input.
    # (This assertion was previously duplicated verbatim; one copy removed.)
    assert psnr_denoised > psnr_noisy and ssim_denoised > ssim_noisy

    # if the line below fails, then the parameters of the image or the
    # regressor have been broken. do not change the numbers below; instead,
    # fix the problem -- most likely a parameter.
    assert psnr_denoised > min_psnr and ssim_denoised > min_ssim
| 26.493976 | 100 | 0.744429 |
cf3d1f36d1734149e051952b484523a39541b79a | 1,417 | py | Python | server/util.py | markbirds/Anime-Recommendation-System | cef5b21ff2feb17a165fc23bcdd314b1880c2cd6 | [
"MIT"
] | null | null | null | server/util.py | markbirds/Anime-Recommendation-System | cef5b21ff2feb17a165fc23bcdd314b1880c2cd6 | [
"MIT"
] | null | null | null | server/util.py | markbirds/Anime-Recommendation-System | cef5b21ff2feb17a165fc23bcdd314b1880c2cd6 | [
"MIT"
] | 1 | 2020-12-31T04:31:25.000Z | 2020-12-31T04:31:25.000Z | from scipy.sparse import load_npz
import pandas as pd
import pickle
import json
# Model artifacts are loaded once at import time.
anime_list = pd.read_pickle('server/files/anime_list.pickle')
csr_rating_matrix = load_npz('server/files/csr_rating_matrix.npz')
# Row/column labels for the sparse user-item rating matrix.
with open('server/files/rating_matrix_anime_id.json') as f:
    anime_id = json.load(f)
with open('server/files/rating_matrix_user_id.json') as f:
    user_id = json.load(f)
# Dense users x anime rating matrix used for correlation lookups.
rating_matrix_pearson = pd.DataFrame(csr_rating_matrix.toarray(),index=user_id['user_id'],columns=anime_id['anime_id'])
def return_anime_list():
    """Return every anime title known to the recommender as a plain list."""
    names = anime_list['name'].values
    return list(names)
def recommend_pearson(anime, filter):
    """Recommend up to 20 titles most rating-correlated with ``anime``.

    Args:
        anime: exact title to base recommendations on.
        filter: media type to restrict results to, or 'Select all' for no
            restriction. (Name shadows the builtin but is kept for caller
            compatibility.)

    Returns:
        dict (column -> {row -> value}) of recommendation rows, or {} when
        the title is unknown or the correlation pipeline fails.
    """
    dropped = ['anime_id', 'rating_count', 'Correlation', 'members']
    try:
        user_anime = anime_list[anime_list['name'] == anime]
        user_anime_ratings = rating_matrix_pearson[int(user_anime['anime_id'])]
        correlated_anime = (rating_matrix_pearson.corrwith(user_anime_ratings)
                            .reset_index()
                            .rename(columns={0: 'Correlation', 'index': 'anime_id'}))
        recommended_anime = pd.merge(correlated_anime, anime_list, on='anime_id', how='left')
        if filter != 'Select all':
            recommended_anime = recommended_anime[recommended_anime['type'] == filter]
        # head(21) then iloc[1:]: keep the top 20, dropping the query title
        # itself (it always correlates perfectly with itself).
        return (recommended_anime.sort_values(by='Correlation', ascending=False)
                .head(21).iloc[1:]
                .drop(columns=dropped).to_dict())
    except Exception:
        # Best-effort API: unknown titles / malformed data yield no results.
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt.
        return {}
| 44.28125 | 205 | 0.757234 |
3da678df57de9772574a067cbc90568e67c7f9fd | 3,254 | py | Python | python/phonenumbers/data/region_MM.py | Eyepea/python-phonenumbers | 0336e191fda80a21ed5c19d5e029ad8c70f620ee | [
"Apache-2.0"
] | 2 | 2019-03-30T02:12:54.000Z | 2021-03-08T18:59:40.000Z | python/phonenumbers/data/region_MM.py | Eyepea/python-phonenumbers | 0336e191fda80a21ed5c19d5e029ad8c70f620ee | [
"Apache-2.0"
] | null | null | null | python/phonenumbers/data/region_MM.py | Eyepea/python-phonenumbers | 0336e191fda80a21ed5c19d5e029ad8c70f620ee | [
"Apache-2.0"
] | 1 | 2018-11-10T03:47:34.000Z | 2018-11-10T03:47:34.000Z | """Auto-generated file, do not edit by hand. MM metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_MM = PhoneMetadata(id='MM', country_code=95, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[14578]\\d{5,7}|[26]\\d{5,8}|9(?:[258]|3\\d|4\\d{1,2}|[679]\\d?)\\d{6}', possible_number_pattern='\\d{5,10}'),
fixed_line=PhoneNumberDesc(national_number_pattern='1(?:2\\d{1,2}|[3-5]\\d|6\\d?|[89][0-6]\\d)\\d{4}|2(?:[236-9]\\d{4}|4(?:0\\d{5}|\\d{4})|5(?:1\\d{3,6}|[02-9]\\d{3,5}))|4(?:2[245-8]|[346][2-6]|5[3-5])\\d{4}|5(?:2(?:20?|[3-8])|3[2-68]|4(?:21?|[4-8])|5[23]|6[2-4]|7[2-8]|8[24-7]|9[2-7])\\d{4}|6(?:0[23]|1[2356]|[24][2-6]|3[24-6]|5[2-4]|6[2-8]|7(?:[2367]|4\\d|5\\d?|8[145]\\d)|8[245]|9[24])\\d{4}|7(?:[04][24-8]|[15][2-7]|22|3[2-4])\\d{4}|8(?:1(?:2\\d?|[3-689])|2[2-8]|3[24]|4[24-7]|5[245]|6[23])\\d{4}', possible_number_pattern='\\d{5,9}', example_number='1234567'),
mobile=PhoneNumberDesc(national_number_pattern='17[01]\\d{4}|9(?:2[0-4]|3[136]\\d|4(?:0[0-4]\\d|[1379]\\d|[24][0-589]\\d|5\\d{2}|88)|5[0-6]|61?\\d|73\\d|8\\d|9(?:1\\d|[089]))\\d{5}', possible_number_pattern='\\d{7,10}', example_number='92123456'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='1333\\d{4}', possible_number_pattern='\\d{8}', example_number='13331234'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(\\d)(\\d{3})(\\d{3,4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['1|2[45]'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(2)(\\d{4})(\\d{4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['251'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(\\d)(\\d{2})(\\d{3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['16|2'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(\\d{2})(\\d{3})(\\d{3,4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['67|81'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{3,4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['[4-8]'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(9)(\\d{3})(\\d{4,5})', format=u'\\1 \\2 \\3', leading_digits_pattern=['9(?:[235-9]|4[13789])'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(9)(4\\d{4})(\\d{4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['94[0245]'], national_prefix_formatting_rule=u'0\\1')])
| 125.153846 | 569 | 0.656423 |
67454b8467fb827bec805e27d6033c911135736a | 2,400 | py | Python | tests/modules/test_GUIScreen.py | IamSaurabh1/taurus | 928d44e30e6cd5b979e675bfdce4c1dbeb5d0eff | [
"Apache-2.0"
] | 1 | 2021-09-15T05:27:22.000Z | 2021-09-15T05:27:22.000Z | tests/modules/test_GUIScreen.py | IamSaurabh1/taurus | 928d44e30e6cd5b979e675bfdce4c1dbeb5d0eff | [
"Apache-2.0"
] | null | null | null | tests/modules/test_GUIScreen.py | IamSaurabh1/taurus | 928d44e30e6cd5b979e675bfdce4c1dbeb5d0eff | [
"Apache-2.0"
] | null | null | null | import time
from urwid.canvas import Canvas
from bzt import ManualShutdown
from bzt.modules.console import TaurusConsole
from bzt.utils import is_linux
from tests import BZTestCase
from bzt.modules.screen import GUIScreen as Screen
class TestCanvas(Canvas):
    """Stub urwid Canvas that simply replays a canned sequence of rows."""

    def __init__(self, value):
        super(TestCanvas, self).__init__()
        self.value = value

    def content(self, trim_left=0, trim_top=0, cols=None, rows=None, attr=None):
        # Trimming/size arguments are ignored; every stored row is replayed.
        for row in self.value:
            yield row

    def rows(self):
        pass

    def content_delta(self):
        pass

    def cols(self):
        pass
class TestGUIScreen(BZTestCase):
    """Exercises the Tk-based GUIScreen against a canned urwid canvas."""

    def test_draw_screen(self):
        # One pseudo-row per palette entry so every registered style is drawn.
        lines = [((x[0], None, "%s\n" % x[0]),) for x in TaurusConsole.palette]
        canvas = TestCanvas(lines)
        obj = Screen()
        """
        :type: bzt.modules.screen.GUIScreen
        """
        obj.register_palette(TaurusConsole.palette)
        obj.start()
        for _ in range(1, 10):
            obj.draw_screen((1, 1), canvas)
            time.sleep(0.1)
        # Font zoom: Ctrl+wheel-up grows the font, Ctrl+wheel-down restores it.
        # X11 reports wheel events as buttons 4/5; other platforms use
        # <Control-MouseWheel> with a signed delta.
        if hasattr(obj, 'font'):
            old_font_size = 10
            obj.font['size'] = old_font_size
            self.assertGreater(old_font_size, 0)
            if is_linux():
                obj.root.event_generate("<Control-4>")
            else:
                obj.root.event_generate("<Control-MouseWheel>", delta=120)
            self.assertGreater(obj.font['size'], old_font_size)
            if is_linux():
                obj.root.event_generate("<Control-5>")
            else:
                obj.root.event_generate("<Control-MouseWheel>", delta=-120)
            self.assertEqual(obj.font['size'], old_font_size)
        obj.stop()

    def test_window_closed(self):
        lines = [((x[0], None, "%s\n" % x[0]),) for x in TaurusConsole.palette]
        canvas = TestCanvas(lines)
        obj = Screen()
        obj.register_palette(TaurusConsole.palette)
        obj.start()
        for _ in range(5):
            obj.draw_screen((1, 1), canvas)
            time.sleep(0.1)
        # closing the window
        obj.closed_window()
        # first call to draw_screen should raise ManualShutdown
        self.assertRaises(ManualShutdown, obj.draw_screen, (1, 1), canvas)
        # consecutive calls to draw_screen shouldn't raise
        obj.draw_screen((1, 1), canvas)
        obj.draw_screen((1, 1), canvas)
| 27.906977 | 80 | 0.590417 |
70f48d97f38fb4dc3bd7b28fad387b7f00dd97bf | 4,729 | py | Python | openstack_dashboard/dashboards/admin/hypervisors/compute/tables.py | TechBK/horizon-dev | db426842116af515f00b9e7c887f4f3f9f28298c | [
"Apache-2.0"
] | 1 | 2017-12-07T05:21:58.000Z | 2017-12-07T05:21:58.000Z | openstack_dashboard/dashboards/admin/hypervisors/compute/tables.py | tmrts/horizon | c502d69290218c6ec9399ba40a9bccb3f8bc64ae | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/admin/hypervisors/compute/tables.py | tmrts/horizon | c502d69290218c6ec9399ba40a9bccb3f8bc64ae | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.template import defaultfilters as filters
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import tables
from horizon.utils import filters as utils_filters
from openstack_dashboard import api
from openstack_dashboard import policy
class EvacuateHost(tables.LinkAction):
    """Link action opening the evacuate-host dialog for a down compute host."""

    name = "evacuate"
    verbose_name = _("Evacuate Host")
    url = "horizon:admin:hypervisors:compute:evacuate_host"
    classes = ("ajax-modal", "btn-migrate")
    policy_rules = (("compute", "compute_extension:evacuate"),)

    def __init__(self, **kwargs):
        super(EvacuateHost, self).__init__(**kwargs)
        # Allow callers to override the action name at construction time.
        self.name = kwargs.get('name', self.name)

    def allowed(self, request, instance):
        # Offered only when the admin-actions extension exists and the
        # service state is reported down.
        if api.nova.extension_supported('AdminActions', request):
            return self.datum.state == "down"
        return False
class DisableService(policy.PolicyTargetMixin, tables.LinkAction):
    """Link action opening the disable-service dialog for an enabled service."""

    name = "disable"
    verbose_name = _("Disable Service")
    url = "horizon:admin:hypervisors:compute:disable_service"
    classes = ("ajax-modal", "btn-confirm")
    policy_rules = (("compute", "compute_extension:services"),)

    def allowed(self, request, service):
        if api.nova.extension_supported('AdminActions', request):
            return service.status == "enabled"
        return False
class EnableService(policy.PolicyTargetMixin, tables.BatchAction):
    """Batch action that re-enables the nova-compute service on a host."""

    name = "enable"
    policy_rules = (("compute", "compute_extension:services"),)

    @staticmethod
    def action_present(count):
        # Button / progress label, pluralized by row count.
        return ungettext_lazy(
            u"Enable Service",
            u"Enable Services",
            count
        )

    @staticmethod
    def action_past(count):
        # Completion message, pluralized by row count.
        return ungettext_lazy(
            u"Enabled Service",
            u"Enabled Services",
            count
        )

    def allowed(self, request, service):
        if api.nova.extension_supported('AdminActions', request):
            return service.status == "disabled"
        return False

    def action(self, request, obj_id):
        # obj_id is the host name; re-enable its nova-compute binary.
        api.nova.service_enable(request, obj_id, 'nova-compute')
class MigrateMaintenanceHost(tables.LinkAction):
    """Link action opening the migrate-host dialog for a disabled host."""

    name = "migrate_maintenance"
    policy_rules = (("compute", "compute_extension:admin_actions:migrate"),)
    classes = ('ajax-modal', 'btn-migrate', 'btn-danger')
    verbose_name = _("Migrate Host")
    url = "horizon:admin:hypervisors:compute:migrate_host"

    @staticmethod
    def action_present(count):
        # Button / progress label, pluralized by row count.
        return ungettext_lazy(
            u"Migrate Host",
            u"Migrate Hosts",
            count
        )

    @staticmethod
    def action_past(count):
        # Completion message, pluralized by row count.
        return ungettext_lazy(
            u"Migrated Host",
            u"Migrated Hosts",
            count
        )

    def allowed(self, request, service):
        if api.nova.extension_supported('AdminActions', request):
            return service.status == "disabled"
        return False
class ComputeHostFilterAction(tables.FilterAction):
    """Table filter matching compute services by (case-insensitive) host name."""

    def filter(self, table, services, filter_string):
        """Return the services whose host name contains ``filter_string``."""
        q = filter_string.lower()
        # Build a list instead of returning the builtin ``filter`` object:
        # on Python 3 that object is a one-shot iterator, whereas the list is
        # reusable and behaves identically on Python 2 and 3.
        return [service for service in services if q in service.host.lower()]
class ComputeHostTable(tables.DataTable):
    """Table of nova-compute hosts with enable/disable/evacuate/migrate actions."""

    host = tables.Column('host', verbose_name=_('Host'))
    zone = tables.Column('zone', verbose_name=_('Zone'))
    status = tables.Column('status', verbose_name=_('Status'))
    state = tables.Column('state', verbose_name=_('State'))
    # Render the ISO timestamp as a human-friendly "time since" value.
    updated_at = tables.Column('updated_at',
                               verbose_name=_('Updated At'),
                               filters=(utils_filters.parse_isotime,
                                        filters.timesince))

    def get_object_id(self, obj):
        # The host name doubles as the unique row identifier.
        return obj.host

    def get_object_display(self, obj):
        return obj.host

    class Meta(object):
        name = "compute_host"
        verbose_name = _("Compute Host")
        table_actions = (ComputeHostFilterAction,)
        multi_select = False
        row_actions = (
            EvacuateHost,
            DisableService,
            EnableService,
            MigrateMaintenanceHost
        )
| 31.526667 | 76 | 0.654895 |
7f9211bf01feb605297461318706425773071651 | 7,568 | py | Python | dm/netdev.py | DentonGentry/gfiber-catawampus | b01e4444f3c7f12b1af7837203b37060fd443bb7 | [
"Apache-2.0"
] | 2 | 2017-10-03T16:06:29.000Z | 2020-09-08T13:03:13.000Z | dm/netdev.py | DentonGentry/gfiber-catawampus | b01e4444f3c7f12b1af7837203b37060fd443bb7 | [
"Apache-2.0"
] | null | null | null | dm/netdev.py | DentonGentry/gfiber-catawampus | b01e4444f3c7f12b1af7837203b37060fd443bb7 | [
"Apache-2.0"
] | 1 | 2017-05-07T17:39:02.000Z | 2017-05-07T17:39:02.000Z | #!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TR-069 has mandatory attribute names that don't comply with policy
# pylint:disable=invalid-name
"""Implementation of network device support used in a number of data models."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import tr.cwmptypes
import tr.session
# Unit tests can override this.
PROC_NET_DEV = '/proc/net/dev'
class NetdevStatsLinux26(object):
"""Parses /proc/net/dev to populate Stats objects in several TRs."""
# Fields in /proc/net/dev
_RX_BYTES = 0
_RX_PKTS = 1
_RX_ERRS = 2
_RX_DROP = 3
_RX_FIFO = 4
_RX_FRAME = 5
_RX_COMPRESSED = 6
_RX_MCAST = 7
_TX_BYTES = 8
_TX_PKTS = 9
_TX_ERRS = 10
_TX_DROP = 11
_TX_FIFO = 12
_TX_COLLISIONS = 13
_TX_CARRIER = 14
_TX_COMPRESSED = 15
BroadcastPacketsReceived = tr.cwmptypes.ReadOnlyUnsigned(0)
BroadcastPacketsSent = tr.cwmptypes.ReadOnlyUnsigned(0)
MulticastPacketsSent = tr.cwmptypes.ReadOnlyUnsigned(0)
UnknownProtoPacketsReceived = tr.cwmptypes.ReadOnlyUnsigned(0)
def __init__(self, ifname, qfiles=None, numq=0, hipriq=0):
"""Parse fields from a /proc/net/dev line.
Args:
ifname: string name of the interface, like "eth0"
qfiles: path to per-queue discard count files
numq: number of per-queue discard files to look for
hipriq: number of qfiles to include in DiscardPacketsReceivedHipri
"""
self.ifname = ifname
self.qfiles = qfiles
self.numq = numq
self.hipriq = hipriq
self.bytes_received = 0L
self.bytes_sent = 0L
self.discards_received = 0L
self.discards_sent = 0L
self.errors_received = 0L
self.errors_sent = 0L
self.mcast_received = 0L
self.pkts_received = 0L
self.pkts_sent = 0L
self.old_ifstats = [0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L,
0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L]
def Delta(self, new, old):
"""Return the delta between two counter values."""
if old <= new:
return new - old
else:
return 0xffffffffL - old + new
@property
def BytesReceived(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._RX_BYTES
self.bytes_received += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.bytes_received
@property
def BytesSent(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._TX_BYTES
self.bytes_sent += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.bytes_sent
@property
def DiscardPacketsReceived(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._RX_DROP
self.discards_received += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
i = self._RX_FIFO
self.discards_received += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.discards_received
@property
def DiscardPacketsSent(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._TX_DROP
self.discards_sent += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.discards_sent
@property
def ErrorsReceived(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._RX_ERRS
self.errors_received += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
i = self._RX_FRAME
self.errors_received += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.errors_received
@property
def ErrorsSent(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._TX_ERRS
self.errors_sent += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
i = self._TX_FIFO
self.errors_sent += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.errors_sent
@property
def MulticastPacketsReceived(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._RX_MCAST
self.mcast_received += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.mcast_received
@property
def PacketsReceived(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._RX_PKTS
self.pkts_received += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.pkts_received
@property
def PacketsSent(self):
ifstats = self._ReadProcNetDev(self.ifname)
i = self._TX_PKTS
self.pkts_sent += self.Delta(ifstats[i], self.old_ifstats[i])
self.old_ifstats[i] = ifstats[i]
return self.pkts_sent
@property
def UnicastPacketsSent(self):
return self.PacketsSent
@property
def UnicastPacketsReceived(self):
uni = self.PacketsReceived - self.MulticastPacketsReceived
if uni < 0:
# b/12022359 would try to set UnicastPacketsReceived negative, and result
# in a ValueError. That shouldn't happen any more now that counters
# are 64 bit, but just in case we check for it here. This is the only
# stat involving subtraction.
uni = 0
return uni
@property
def X_CATAWAMPUS_ORG_DiscardFrameCnts(self):
return self._ReadDiscardStats(self.qfiles, self.numq)
@property
def X_CATAWAMPUS_ORG_DiscardPacketsReceivedHipri(self):
return self._GetHighPrioDiscards(self.X_CATAWAMPUS_ORG_DiscardFrameCnts,
self.hipriq)
@tr.session.cache
def _ReadProcNetDev(self, ifname):
"""Return the /proc/net/dev entry for ifname.
Args:
ifname: string name of the interface, e.g.: "eth0"
Returns:
The /proc/net/dev entry for ifname as a list.
"""
with open(PROC_NET_DEV) as f:
for line in f:
fields = line.split(':')
if (len(fields) == 2) and (fields[0].strip() == ifname):
ifstats = fields[1].split()
return [long(x) for x in ifstats]
return None
@tr.session.cache
def _ReadDiscardStats(self, qfiles, numq):
"""Return the discard counters for ifname.
Args:
qfiles: path to per-queue discard count files
numq: number of per-queue discard files to look for
Returns:
A list of all the values in the qfiles, where index
ranges from 0 to numq (there is a different counter
for each queue).
"""
discard_cnts = []
for i in range(numq):
try:
file_path = qfiles % i
with open(file_path) as f:
val = long(f.readline().strip())
discard_cnts.append(val)
except (IOError, ValueError, TypeError):
print 'WARN: _ReadDiscardStats %r failed' % (file_path,)
discard_cnts.append(0)
return discard_cnts
def _GetHighPrioDiscards(self, discards, hipriq):
"""Return sum of discards[0:hipriq]."""
total = 0L
for i in range(hipriq):
try:
total += long(discards[i])
except (IndexError, ValueError):
continue
return total
| 30.889796 | 79 | 0.685254 |
e379549aa63e5685bcba083be9e1ba79bb1952a8 | 111 | py | Python | ncsnv2/runners/__init__.py | jmyoon1/adp | 497e2d40a699b220a6662187865dbaa2ac6f2d62 | [
"MIT"
] | 5 | 2021-06-15T16:09:57.000Z | 2021-12-26T14:06:13.000Z | ncsnv2/runners/__init__.py | jmyoon1/adp | 497e2d40a699b220a6662187865dbaa2ac6f2d62 | [
"MIT"
] | 2 | 2021-08-19T06:19:00.000Z | 2021-08-19T06:27:21.000Z | ncsnv2/runners/__init__.py | jmyoon1/adp | 497e2d40a699b220a6662187865dbaa2ac6f2d62 | [
"MIT"
] | 1 | 2022-02-23T13:17:40.000Z | 2022-02-23T13:17:40.000Z |
# For generating models
#from runners.ncsn_runner import *
# For adp
from ncsnv2.runners.ncsn_runner import *
| 18.5 | 40 | 0.783784 |
11151ef7666dcaec889aa6e610e2442d73e2ccf0 | 356 | py | Python | frappe/www/error.py | ssuda777/frappe | d3f3df2ce15154aecc1d9d6d07d947e72c2e8c6e | [
"MIT"
] | 1 | 2021-12-18T18:37:29.000Z | 2021-12-18T18:37:29.000Z | frappe/www/error.py | JMBodz/frappe | eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d | [
"MIT"
] | 3 | 2021-02-27T11:50:14.000Z | 2021-05-03T06:48:49.000Z | frappe/www/error.py | JMBodz/frappe | eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d | [
"MIT"
] | 2 | 2021-09-02T09:51:55.000Z | 2021-09-07T04:55:42.000Z | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
no_cache = 1
def get_context(context):
    """Populate the context for the generic 500 error page.

    Skips entirely during migrations; otherwise sets the HTTP status and
    returns the current traceback, HTML-escaped for safe display.
    """
    if frappe.flags.in_migrate:
        return
    context.http_status_code = 500
    # Fetch the traceback once and reuse it (it was previously computed twice).
    traceback = frappe.get_traceback()
    print(traceback.encode("utf-8"))
    # Escape angle brackets so the traceback renders as text, not markup.
    return {"error": traceback.replace("<", "&lt;").replace(">", "&gt;")}
| 27.384615 | 84 | 0.716292 |
ca70d0198aa288b34300e6e693008383859e69d3 | 2,755 | py | Python | src/cms/contacts/tests.py | UniversitaDellaCalabria/uniCMS | b0af4e1a767867f0a9b3c135a5c84587e713cb71 | [
"Apache-2.0"
] | 6 | 2021-01-26T17:22:53.000Z | 2022-02-15T10:09:03.000Z | src/cms/contacts/tests.py | UniversitaDellaCalabria/uniCMS | b0af4e1a767867f0a9b3c135a5c84587e713cb71 | [
"Apache-2.0"
] | 5 | 2020-12-24T14:29:23.000Z | 2021-08-10T10:32:18.000Z | src/cms/contacts/tests.py | UniversitaDellaCalabria/uniCMS | b0af4e1a767867f0a9b3c135a5c84587e713cb71 | [
"Apache-2.0"
] | 2 | 2020-12-24T14:13:39.000Z | 2020-12-30T16:48:52.000Z | import logging
from django.test import TestCase
from . models import Contact, ContactInfo, ContactInfoLocalization, ContactLocalization
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class ContactUnitTest(TestCase):
    """Builds Contact/ContactInfo fixtures and smoke-tests their API."""

    def setUp(self):
        # (Parameter was previously misnamed ``cls``; this is an instance
        # method.) Nothing to prepare.
        pass

    @classmethod
    def create_contact(cls, **kwargs):
        """Create a Contact; ``kwargs`` override the default field values."""
        data = {'name': 'john doe',
                'description': 'employee of the year',
                'contact_type': 'person',
                'is_active': 1}
        data.update(kwargs)
        return Contact.objects.create(**data)

    @classmethod
    def create_contact_info(cls, contact_data=None, **kwargs):
        """Create a ContactInfo attached to a freshly created Contact.

        Args:
            contact_data: optional overrides for the underlying Contact.
                (Was a mutable ``{}`` default argument - replaced with None.)
            **kwargs: overrides for the ContactInfo fields.
        """
        contact = cls.create_contact(**(contact_data or {}))
        data = {'contact': contact,
                'label': 'first email',
                'value': 'john.doe@email.com',
                'info_type': 'email',
                'is_active': 1}
        data.update(kwargs)
        return ContactInfo.objects.create(**data)

    @classmethod
    def create_contact_localization(cls, contact_data=None, **kwargs):
        """Create a ContactLocalization (default language 'en')."""
        contact = cls.create_contact(**(contact_data or {}))
        data = {'contact': contact,
                'language': 'en',
                'name': 'john doe en',
                'description': '',
                'is_active': 1}
        data.update(kwargs)
        return ContactLocalization.objects.create(**data)

    @classmethod
    def create_contact_info_localization(cls, contact_data=None,
                                         contact_info_data=None, **kwargs):
        """Create a ContactInfoLocalization plus its parent objects."""
        contact_info = cls.create_contact_info(contact_data=contact_data or {},
                                               **(contact_info_data or {}))
        data = {'contact_info': contact_info,
                'language': 'en',
                'label': 'first email en',
                'value': 'john.doe.en@email.com',
                'is_active': 1}
        data.update(kwargs)
        return ContactInfoLocalization.objects.create(**data)

    def test_contact_localization(self):
        """Smoke test: build a localized info chain and exercise accessors."""
        contact_info_localization = self.create_contact_info_localization()
        contact_info_localization.__str__()
        contact_info = contact_info_localization.contact_info
        contact = contact_info.contact
        contact.__str__()
        contact.localized()
        contact.get_infos()
| 30.955056 | 87 | 0.528494 |
6327e265d8c1e66b93e37d63d430874b3e147d05 | 1,058 | py | Python | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/tests/series/test_validate.py | jeikabu/lumberyard | 07228c605ce16cbf5aaa209a94a3cb9d6c1a4115 | [
"AML"
] | 18 | 2018-02-23T11:28:54.000Z | 2021-09-23T08:19:54.000Z | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/tests/series/test_validate.py | jeikabu/lumberyard | 07228c605ce16cbf5aaa209a94a3cb9d6c1a4115 | [
"AML"
] | 2 | 2021-02-08T20:19:17.000Z | 2021-04-30T20:32:52.000Z | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/tests/series/test_validate.py | jeikabu/lumberyard | 07228c605ce16cbf5aaa209a94a3cb9d6c1a4115 | [
"AML"
] | 12 | 2017-05-23T06:01:12.000Z | 2021-08-16T05:09:46.000Z | import pytest
from pandas.core.series import Series
class TestSeriesValidate(object):
    """Tests for error handling related to data types of method arguments."""

    s = Series([1, 2, 3, 4, 5])

    def test_validate_bool_args(self):
        # Every method taking an ``inplace`` flag must reject values that are
        # not real booleans.
        invalid_values = [1, "True", [1, 2, 3], 5.0]

        for value in invalid_values:
            with pytest.raises(ValueError):
                self.s.reset_index(inplace=value)

            with pytest.raises(ValueError):
                self.s._set_name(name='hello', inplace=value)

            with pytest.raises(ValueError):
                self.s.sort_values(inplace=value)

            # (This check was previously duplicated verbatim; one copy removed.)
            with pytest.raises(ValueError):
                self.s.sort_index(inplace=value)

            with pytest.raises(ValueError):
                self.s.rename(inplace=value)

            with pytest.raises(ValueError):
                self.s.dropna(inplace=value)
| 31.117647 | 77 | 0.603025 |
a9850ec6a6cb50f259990398c7c6201c7a3cb78f | 594 | py | Python | asgi_webdav/docker.py | hansnow/asgi-webdav | 5a353a25182ea75372d1f2d004d52707729bb3c2 | [
"MIT"
] | 1 | 2021-05-16T04:58:08.000Z | 2021-05-16T04:58:08.000Z | asgi_webdav/docker.py | liuwentao/asgi-webdav | 5a353a25182ea75372d1f2d004d52707729bb3c2 | [
"MIT"
] | null | null | null | asgi_webdav/docker.py | liuwentao/asgi-webdav | 5a353a25182ea75372d1f2d004d52707729bb3c2 | [
"MIT"
] | null | null | null | from asgi_webdav.config import create_config_from_file
from asgi_webdav.webdav import WebDAV
from asgi_webdav.middleware.http_basic_and_digest_auth import (
HTTPAuthMiddleware,
)
# Build the runtime configuration.
config = create_config_from_file()
# Create the core WebDAV ASGI application (docker mode).
app = WebDAV(config, in_docker=True)
# Wrap it with HTTP Basic/Digest authentication using configured credentials.
app = HTTPAuthMiddleware(app, username=config.username, password=config.password)
# Optionally wrap with Sentry error reporting when a DSN is configured;
# sentry_sdk is imported lazily so it is only required in that case.
if config.sentry_dsn:
    import sentry_sdk
    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

    sentry_sdk.init(dsn=config.sentry_dsn)
    app = SentryAsgiMiddleware(app)
| 24.75 | 81 | 0.80303 |
4c9f192da9c73ec6ee2e242728ba500dac62bb46 | 6,691 | py | Python | tests/test_client_0_1.py | abkfenris/gage-client | 8a7600b553ab9e2ede8cb8bab7ce654186143536 | [
"MIT"
] | null | null | null | tests/test_client_0_1.py | abkfenris/gage-client | 8a7600b553ab9e2ede8cb8bab7ce654186143536 | [
"MIT"
] | 8 | 2015-03-14T22:58:23.000Z | 2015-04-25T22:51:31.000Z | tests/test_client_0_1.py | abkfenris/gage-client | 8a7600b553ab9e2ede8cb8bab7ce654186143536 | [
"MIT"
] | null | null | null | import unittest
import json
import responses
import requests
from itsdangerous import JSONWebSignatureSerializer, BadSignature
from datetime import datetime as dt
from gage_client import Client
from gage_client.client import Client_0_1, AuthenticationError, SendError
# Shared fixtures for the version 0.1 client tests below.
password = 'password'
url_stub = 'http://riverflo.ws/api/0.1/'
gage_id = 5
# Serializer used by the mock server callbacks to verify payload signatures
# (same password the client signs with).
s = JSONWebSignatureSerializer(password)
# Endpoint the mocked server responds on.
url = url_stub + 'gages/' + str(gage_id) + '/sample'
# bad_url has no mock registered; bad_password produces invalid signatures.
bad_url = 'http://riverflo.ws'
bad_password = 'badpassword'
def client_0_1_response_callback(request):
    """Mock server callback that acknowledges every submitted sample.

    Verifies the request signature with the module-level serializer ``s``;
    returns a 401 tuple on a bad signature, otherwise a 200 tuple whose body
    echoes each sample back as "created".
    """
    try:
        payload = s.loads(request.body)
    except BadSignature:
        print('Bad Signature')
        output = {'error': 'unauthorized',
                  'message': 'bad signature'}
        return (401, {}, json.dumps(output))
    samples = payload['samples']
    output_samples = []
    # Fixed: ``count`` was initialized to 0 and never incremented, so every
    # echoed sample shared id 0; enumerate numbers them properly.
    for count, sample in enumerate(samples):
        result_json = {
            'datetime': sample['datetime'],
            # NOTE(review): the key 'id ' has a trailing space; kept as-is in
            # case the client under test relies on it -- confirm upstream.
            'id ': count,
            'sender_id': sample['sender_id'],
            # Fixed: the placeholder was '(count)', which str.format ignores,
            # so the URL never contained the sample id.
            'url': 'http://example.com/api/0.1/samples/{count}'.format(count=count),
            'value': sample['value']
        }
        output_samples.append(result_json)
    resp_body = {'gage': {'id': payload['gage']['id']},
                 'result': 'created',
                 'samples': output_samples}
    return (200, {}, json.dumps(resp_body))
def client_0_1_partial_callback(request):
    """Mock server callback that acknowledges only every second sample.

    Same contract as ``client_0_1_response_callback`` but drops half of the
    submitted samples (``[::2]``) so the client sees a partial success and
    should raise ``SendError``.
    """
    try:
        payload = s.loads(request.body)
    except BadSignature:
        print('Bad Signature')
        output = {'error': 'unauthorized',
                  'message': 'bad signature'}
        return (401, {}, json.dumps(output))
    # Only acknowledge every other sample to simulate partial processing.
    samples = payload['samples'][::2]
    output_samples = []
    # Fixed: ``count`` was never incremented; enumerate numbers the samples.
    for count, sample in enumerate(samples):
        result_json = {
            'datetime': sample['datetime'],
            # NOTE(review): key 'id ' has a trailing space; kept as-is.
            'id ': count,
            'sender_id': sample['sender_id'],
            # Fixed: '(count)' placeholder was never substituted by format.
            'url': 'http://example.com/api/0.1/samples/{count}'.format(count=count),
            'value': sample['value']
        }
        output_samples.append(result_json)
    resp_body = {'gage': {'id': payload['gage']['id']},
                 'result': 'created',
                 'samples': output_samples}
    return (200, {}, json.dumps(resp_body))
class Test_Client_0_1(unittest.TestCase):
    """
    Basic tests of Client_0_1
    """
    def setUp(self):
        """Start each test with a clean mock transport and a fresh client."""
        responses.reset()
        self.client = Client(url, gage_id, password)
    def testVersion(self):
        """The factory should hand back a version 0.1 client for this URL."""
        self.assertEqual(type(self.client), Client_0_1)
    def testReading(self):
        """A stored reading keeps its sensor type, value and timestamp."""
        datetime = str(dt.now())
        sensor = 'level'
        value = 4.2
        self.client.reading(sensor, datetime, value)
        # Fixed: assertEquals is a deprecated alias of assertEqual
        # (removed in Python 3.12).
        self.assertEqual(len(self.client.samples), 1)
        print(self.client.samples[0])
        self.assertEqual(self.client.samples[0]['type'], sensor)
        self.assertEqual(self.client.samples[0]['value'], value)
        self.assertEqual(self.client.samples[0]['datetime'], datetime)
        self.assertEqual(len(self.client.readings()), 1)
    @responses.activate
    def testSend_All(self):
        """send_all succeeds when the mock server acknowledges every sample."""
        responses.add_callback(
            responses.POST, url,
            callback=client_0_1_response_callback,
            content_type='application/json'
        )
        self.client.reading('level', str(dt.now()), 4.2)
        self.client.reading('ampherage', str(dt.now()), 375.3)
        self.client.send_all()
class Test_Client_0_1_Partial(Test_Client_0_1):
    """
    Test when a server can only process a few of the responses sent
    """
    # Inherits setUp/testVersion/testReading; only the send behaviour differs.
    @responses.activate
    def testSend_All(self):
        # The partial callback acknowledges only every other sample, so the
        # client must report the incomplete upload as a SendError.
        responses.add_callback(
            responses.POST, url,
            callback=client_0_1_partial_callback,
            content_type='application/json'
        )
        self.client.reading('level', str(dt.now()), 4.2)
        self.client.reading('ampherage', str(dt.now()), 375.3)
        self.assertRaises(SendError, self.client.send_all)
class Test_Client_0_1_Ids(Test_Client_0_1):
    """
    Checks that the client can make readings with non sequential id numbers
    """
    def testReading(self):
        """A reading created with an explicit id keeps its fields intact."""
        datetime = str(dt.now())
        sensor = 'level'
        value = 4.2
        self.client.reading(sensor, datetime, value, id=1)
        # Fixed: assertEquals is a deprecated alias of assertEqual
        # (removed in Python 3.12).
        self.assertEqual(len(self.client.samples), 1)
        print(self.client.samples[0])
        self.assertEqual(self.client.samples[0]['type'], sensor)
        self.assertEqual(self.client.samples[0]['value'], value)
        self.assertEqual(self.client.samples[0]['datetime'], datetime)
class Test_Client_0_1_BadPassword(Test_Client_0_1):
    """
    Checks that the server sends a 401 response and that the client raises an
    Authentication error
    """
    def setUp(self):
        # Same fixture as the base class except the client signs its payloads
        # with the wrong password, so the mock server rejects the signature.
        responses.reset()
        self.client = Client(url, gage_id, bad_password)
    @responses.activate
    def testSend_All(self):
        # The callback returns 401 for a bad signature; the client must
        # surface that as an AuthenticationError.
        responses.add_callback(
            responses.POST, url,
            callback=client_0_1_response_callback,
            content_type='application/json'
        )
        self.client.reading('level', str(dt.now()), 4.2)
        self.client.reading('ampherage', str(dt.now()), 375.3)
        self.assertRaises(AuthenticationError, self.client.send_all)
class Test_Client_0_1_BadEndpoint(Test_Client_0_1):
    """
    Test when the client is given a bad endpoint
    """
    def setUp(self):
        # bad_url lacks the API path, so the factory should not produce a
        # version 0.1 client.
        responses.reset()
        self.client = Client(bad_url, gage_id, password)
    def testVersion(self):
        # NOTE(review): this compares an *instance* to the Client_0_1 *class*,
        # which is always unequal, so the assertion is vacuous. The intent was
        # presumably assertNotEqual(type(self.client), Client_0_1) -- confirm.
        self.assertNotEqual(self.client, Client_0_1)
    # Overriding the inherited test methods with None prevents unittest from
    # running them for this fixture.
    testReading = None
    testSend_All = None
class Test_Client_0_1_MalformedResponse(Test_Client_0_1):
    """
    Test when the server returns something completely random and useless
    """
    @responses.activate
    def testSend_All(self):
        # A non-JSON 404 body should surface as a SendError, not crash the
        # client's response parsing.
        responses.add(
            responses.POST, url,
            body='Error message', status=404,
            content_type='application/json'
        )
        self.client.reading('level', str(dt.now()), 4.2)
        self.client.reading('ampherage', str(dt.now()), 375.3)
        self.assertRaises(SendError, self.client.send_all)
class Test_Client_0_1_404Response(Test_Client_0_1):
    """
    Test when the server returns a 404
    """
    @responses.activate
    def testSend_All(self):
        # A structured JSON 404 should likewise be reported as a SendError.
        responses.add(
            responses.POST, url,
            body='{"error": "not found"}', status=404,
            content_type='application/json'
        )
        self.client.reading('level', str(dt.now()), 4.2)
        self.client.reading('ampherage', str(dt.now()), 375.3)
        self.assertRaises(SendError, self.client.send_all)
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| 31.413146 | 84 | 0.621581 |
909c8c8e1cce75459690cff135b1da7ad6a90d71 | 2,048 | py | Python | tests/test_generic.py | MuckHub/browser-history | 467dafce7caf32668f69199edc4e4018c8403bba | [
"Apache-2.0"
] | 2 | 2020-12-13T05:53:37.000Z | 2020-12-22T00:40:09.000Z | tests/test_generic.py | MuckHub/browser-history | 467dafce7caf32668f69199edc4e4018c8403bba | [
"Apache-2.0"
] | 27 | 2021-05-10T09:52:18.000Z | 2022-03-28T15:10:56.000Z | tests/test_generic.py | MuckHub/browser-history | 467dafce7caf32668f69199edc4e4018c8403bba | [
"Apache-2.0"
] | 1 | 2022-02-10T10:16:20.000Z | 2022-02-10T10:16:20.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=protected-access
"""test for generic module."""
from datetime import datetime
import pytest
from browser_history import generic
def test_outputs_init():
    """A freshly constructed Outputs has no histories but populated maps."""
    outputs = generic.Outputs("history")
    assert not outputs.histories
    assert outputs.format_map
    assert outputs.field_map
@pytest.mark.parametrize(
    "history_rows, expected_csv",
    [
        [[], "Timestamp,URL\r\n"],
        [
            [
                [datetime(2020, 1, 1), "https://google.com"],
                [datetime(2020, 1, 1), "https://example.com"],
            ],
            "Timestamp,URL\r\n"
            "2020-01-01 00:00:00,https://google.com\r\n"
            "2020-01-01 00:00:00,https://example.com\r\n",
        ],
    ],
)
def test_output_to_csv(history_rows, expected_csv):
    """to_csv renders the stored history as CSV with a header row."""
    outputs = generic.Outputs("history")
    outputs.histories.extend(history_rows)
    assert outputs.to_csv() == expected_csv
@pytest.mark.parametrize(
    "history_rows, expected_items",
    [
        [[], []],
        [
            [
                [datetime(2020, 1, 1), "https://google.com"],
                [datetime(2020, 1, 1), "https://google.com/imghp?hl=EN"],
                [datetime(2020, 1, 1), "https://example.com"],
            ],
            [
                (
                    "google.com",
                    [
                        [datetime(2020, 1, 1, 0, 0), "https://google.com"],
                        [
                            datetime(2020, 1, 1, 0, 0),
                            "https://google.com/imghp?hl=EN",
                        ],
                    ],
                ),
                (
                    "example.com",
                    [[datetime(2020, 1, 1, 0, 0), "https://example.com"]],
                ),
            ],
        ],
    ],
)
def test_output_sort_domain(history_rows, expected_items):
    """sort_domain groups history entries by their domain name."""
    outputs = generic.Outputs("history")
    outputs.histories.extend(history_rows)
    assert list(outputs.sort_domain().items()) == expected_items
| 27.306667 | 75 | 0.468262 |
1eb8963d5d3222460bd283feb2a1079f839188d0 | 15,512 | py | Python | thyme/cmds/show.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 6 | 2021-06-30T13:03:47.000Z | 2021-07-10T12:46:37.000Z | thyme/cmds/show.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 8 | 2021-07-01T15:45:09.000Z | 2021-09-08T04:30:46.000Z | thyme/cmds/show.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 11 | 2021-07-03T17:30:57.000Z | 2022-03-15T08:47:03.000Z | from typing import Any
import click
async def show_async(
    rpc_port: int,
    state: bool,
    show_connections: bool,
    exit_node: bool,
    add_connection: str,
    remove_connection: str,
    block_header_hash_by_height: str,
    block_by_header_hash: str,
) -> None:
    """Query a running full node over RPC and print the requested information.

    Each argument enables one independent sub-command (blockchain state, peer
    list, node shutdown, peer add/remove, block lookups); any combination may
    be requested in a single call. All output goes to stdout.
    """
    import aiohttp
    import time
    import traceback
    from time import localtime, struct_time
    from typing import List, Optional
    from thyme.consensus.block_record import BlockRecord
    from thyme.rpc.full_node_rpc_client import FullNodeRpcClient
    from thyme.server.outbound_message import NodeType
    from thyme.types.full_block import FullBlock
    from thyme.util.bech32m import encode_puzzle_hash
    from thyme.util.byte_types import hexstr_to_bytes
    from thyme.util.config import load_config
    from thyme.util.default_root import DEFAULT_ROOT_PATH
    from thyme.util.ints import uint16
    # Fixed: start with a sentinel so the cleanup at the bottom cannot raise
    # NameError when config loading / client creation itself fails.
    client = None
    try:
        config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
        self_hostname = config["self_hostname"]
        if rpc_port is None:
            rpc_port = config["full_node"]["rpc_port"]
        client = await FullNodeRpcClient.create(self_hostname, uint16(rpc_port), DEFAULT_ROOT_PATH, config)
        if state:
            blockchain_state = await client.get_blockchain_state()
            if blockchain_state is None:
                print("There is no blockchain found yet. Try again shortly")
                return None
            peak: Optional[BlockRecord] = blockchain_state["peak"]
            difficulty = blockchain_state["difficulty"]
            sub_slot_iters = blockchain_state["sub_slot_iters"]
            synced = blockchain_state["sync"]["synced"]
            sync_mode = blockchain_state["sync"]["sync_mode"]
            total_iters = peak.total_iters if peak is not None else 0
            num_blocks: int = 10  # how many recent blocks to list below
            if sync_mode:
                sync_max_block = blockchain_state["sync"]["sync_tip_height"]
                sync_current_block = blockchain_state["sync"]["sync_progress_height"]
                print(
                    "Current Blockchain Status: Full Node syncing to block",
                    sync_max_block,
                    "\nCurrently synced to block:",
                    sync_current_block,
                )
            if synced:
                print("Current Blockchain Status: Full Node Synced")
                print("\nPeak: Hash:", peak.header_hash if peak is not None else "")
            elif peak is not None:
                print(f"Current Blockchain Status: Not Synced. Peak height: {peak.height}")
            else:
                print("\nSearching for an initial chain\n")
                print("You may be able to expedite with 'thyme show -a host:port' using a known node.\n")
            if peak is not None:
                # The peak may not be a transaction block; walk back to the
                # nearest one, since only transaction blocks carry timestamps.
                if peak.is_transaction_block:
                    peak_time = peak.timestamp
                else:
                    peak_hash = peak.header_hash
                    curr = await client.get_block_record(peak_hash)
                    while curr is not None and not curr.is_transaction_block:
                        curr = await client.get_block_record(curr.prev_hash)
                    peak_time = curr.timestamp
                peak_time_struct = struct_time(localtime(peak_time))
                print(
                    " Time:",
                    f"{time.strftime('%a %b %d %Y %T %Z', peak_time_struct)}",
                    f" Height: {peak.height:>10}\n",
                )
                print("Estimated network space: ", end="")
                # Scale the raw space figure into TiB, then step up to
                # PiB/EiB while it stays >= 1024.
                network_space_human_readable = blockchain_state["space"] / 1024 ** 4
                if network_space_human_readable >= 1024:
                    network_space_human_readable = network_space_human_readable / 1024
                    if network_space_human_readable >= 1024:
                        network_space_human_readable = network_space_human_readable / 1024
                        print(f"{network_space_human_readable:.3f} EiB")
                    else:
                        print(f"{network_space_human_readable:.3f} PiB")
                else:
                    print(f"{network_space_human_readable:.3f} TiB")
                print(f"Current difficulty: {difficulty}")
                print(f"Current VDF sub_slot_iters: {sub_slot_iters}")
                print("Total iterations since the start of the blockchain:", total_iters)
                print("")
                print(" Height: | Hash:")
                # Collect up to num_blocks most recent blocks, newest first.
                added_blocks: List[BlockRecord] = []
                curr = await client.get_block_record(peak.header_hash)
                while curr is not None and len(added_blocks) < num_blocks and curr.height > 0:
                    added_blocks.append(curr)
                    curr = await client.get_block_record(curr.prev_hash)
                for b in added_blocks:
                    print(f"{b.height:>9} | {b.header_hash}")
            else:
                print("Blockchain has no blocks yet")
            # if called together with show_connections, leave a blank line
            if show_connections:
                print("")
        if show_connections:
            connections = await client.get_connections()
            print("Connections:")
            print(
                "Type IP Ports NodeID Last Connect"
                + " MiB Up|Dwn"
            )
            for con in connections:
                last_connect_tuple = struct_time(localtime(con["last_message_time"]))
                last_connect = time.strftime("%b %d %T", last_connect_tuple)
                mb_down = con["bytes_read"] / (1024 * 1024)
                mb_up = con["bytes_written"] / (1024 * 1024)
                host = con["peer_host"]
                # Strip IPv6 brackets
                if host[0] == "[":
                    host = host[1:39]
                # Nodetype length is 9 because INTRODUCER will be deprecated
                if NodeType(con["type"]) is NodeType.FULL_NODE:
                    # Full nodes additionally report their peak on a second line.
                    peak_height = con["peak_height"]
                    peak_hash = con["peak_hash"]
                    if peak_hash is None:
                        peak_hash = "No Info"
                    if peak_height is None:
                        peak_height = 0
                    con_str = (
                        f"{NodeType(con['type']).name:9} {host:38} "
                        f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
                        f" {con['node_id'].hex()[:8]}... "
                        f"{last_connect} "
                        f"{mb_up:7.1f}|{mb_down:<7.1f}"
                        f"\n "
                        f"-SB Height: {peak_height:8.0f} -Hash: {peak_hash[2:10]}..."
                    )
                else:
                    con_str = (
                        f"{NodeType(con['type']).name:9} {host:38} "
                        f"{con['peer_port']:5}/{con['peer_server_port']:<5}"
                        f" {con['node_id'].hex()[:8]}... "
                        f"{last_connect} "
                        f"{mb_up:7.1f}|{mb_down:<7.1f}"
                    )
                print(con_str)
            # if called together with state, leave a blank line
            if state:
                print("")
        if exit_node:
            node_stop = await client.stop_node()
            print(node_stop, "Node stopped")
        if add_connection:
            if ":" not in add_connection:
                print("Enter a valid IP and port in the following format: 10.5.4.3:8000")
            else:
                # Split on the LAST colon so IPv6-style host:port input survives.
                ip, port = (
                    ":".join(add_connection.split(":")[:-1]),
                    add_connection.split(":")[-1],
                )
                print(f"Connecting to {ip}, {port}")
                try:
                    await client.open_connection(ip, int(port))
                except Exception:
                    print(f"Failed to connect to {ip}:{port}")
        if remove_connection:
            result_txt = ""
            if len(remove_connection) != 8:
                result_txt = "Invalid NodeID. Do not include '.'"
            else:
                connections = await client.get_connections()
                for con in connections:
                    if remove_connection == con["node_id"].hex()[:8]:
                        print("Attempting to disconnect", "NodeID", remove_connection)
                        try:
                            await client.close_connection(con["node_id"])
                        except Exception:
                            result_txt = f"Failed to disconnect NodeID {remove_connection}"
                        else:
                            # Fixed: the second f-string used to be a separate
                            # dead statement, so "<host> disconnected" was
                            # silently dropped from the message.
                            result_txt = (
                                f"NodeID {remove_connection}... {NodeType(con['type']).name} "
                                f"{con['peer_host']} disconnected"
                            )
                    elif result_txt == "":
                        result_txt = f"NodeID {remove_connection}... not found"
            print(result_txt)
        if block_header_hash_by_height != "":
            block_header = await client.get_block_record_by_height(block_header_hash_by_height)
            if block_header is not None:
                print(f"Header hash of block {block_header_hash_by_height}: " f"{block_header.header_hash.hex()}")
            else:
                print("Block height", block_header_hash_by_height, "not found")
        if block_by_header_hash != "":
            block: Optional[BlockRecord] = await client.get_block_record(hexstr_to_bytes(block_by_header_hash))
            full_block: Optional[FullBlock] = await client.get_block(hexstr_to_bytes(block_by_header_hash))
            # Would like to have a verbose flag for this
            if block is not None:
                assert full_block is not None
                prev_b = await client.get_block_record(block.prev_hash)
                # Difficulty is the weight gained over the previous block.
                if prev_b is not None:
                    difficulty = block.weight - prev_b.weight
                else:
                    difficulty = block.weight
                if block.is_transaction_block:
                    assert full_block.transactions_info is not None
                    block_time = struct_time(
                        localtime(
                            full_block.foliage_transaction_block.timestamp
                            if full_block.foliage_transaction_block
                            else None
                        )
                    )
                    block_time_string = time.strftime("%a %b %d %Y %T %Z", block_time)
                    cost = str(full_block.transactions_info.cost)
                    tx_filter_hash = "Not a transaction block"
                    if full_block.foliage_transaction_block:
                        tx_filter_hash = full_block.foliage_transaction_block.filter_hash
                    fees: Any = block.fees
                else:
                    block_time_string = "Not a transaction block"
                    cost = "Not a transaction block"
                    tx_filter_hash = "Not a transaction block"
                    fees = "Not a transaction block"
                address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"]
                farmer_address = encode_puzzle_hash(block.farmer_puzzle_hash, address_prefix)
                pool_address = encode_puzzle_hash(block.pool_puzzle_hash, address_prefix)
                pool_pk = (
                    full_block.reward_chain_block.proof_of_space.pool_public_key
                    if full_block.reward_chain_block.proof_of_space.pool_public_key is not None
                    else "Pay to pool puzzle hash"
                )
                print(
                    f"Block Height {block.height}\n"
                    f"Header Hash 0x{block.header_hash.hex()}\n"
                    f"Timestamp {block_time_string}\n"
                    f"Weight {block.weight}\n"
                    f"Previous Block 0x{block.prev_hash.hex()}\n"
                    f"Difficulty {difficulty}\n"
                    f"Sub-slot iters {block.sub_slot_iters}\n"
                    f"Cost {cost}\n"
                    f"Total VDF Iterations {block.total_iters}\n"
                    f"Is a Transaction Block?{block.is_transaction_block}\n"
                    f"Deficit {block.deficit}\n"
                    f"PoSpace 'k' Size {full_block.reward_chain_block.proof_of_space.size}\n"
                    f"Plot Public Key 0x{full_block.reward_chain_block.proof_of_space.plot_public_key}\n"
                    f"Pool Public Key {pool_pk}\n"
                    f"Tx Filter Hash {tx_filter_hash}\n"
                    f"Farmer Address {farmer_address}\n"
                    f"Pool Address {pool_address}\n"
                    f"Fees Amount {fees}\n"
                )
            else:
                # Fixed: this message used to print the unrelated
                # block_header_hash_by_height argument.
                print("Block with header hash", block_by_header_hash, "not found")
    except Exception as e:
        if isinstance(e, aiohttp.ClientConnectorError):
            print(f"Connection error. Check if full node rpc is running at {rpc_port}")
            print("This is normal if full node is still starting up")
        else:
            tb = traceback.format_exc()
            print(f"Exception from 'show' {tb}")
    # Fixed: only close the client if it was actually created.
    if client is not None:
        client.close()
        await client.await_closed()
@click.command("show", short_help="Show node information")
@click.option(
    "-p",
    "--rpc-port",
    help=(
        "Set the port where the Full Node is hosting the RPC interface. "
        "See the rpc_port under full_node in config.yaml"
    ),
    type=int,
    default=None,
)
@click.option(
    "-wp",
    "--wallet-rpc-port",
    help="Set the port where the Wallet is hosting the RPC interface. See the rpc_port under wallet in config.yaml",
    type=int,
    default=None,
)
@click.option("-s", "--state", help="Show the current state of the blockchain", is_flag=True, type=bool, default=False)
@click.option(
    "-c", "--connections", help="List nodes connected to this Full Node", is_flag=True, type=bool, default=False
)
@click.option("-e", "--exit-node", help="Shut down the running Full Node", is_flag=True, default=False)
@click.option("-a", "--add-connection", help="Connect to another Full Node by ip:port", type=str, default="")
@click.option(
    "-r", "--remove-connection", help="Remove a Node by the first 8 characters of NodeID", type=str, default=""
)
@click.option(
    "-bh", "--block-header-hash-by-height", help="Look up a block header hash by block height", type=str, default=""
)
@click.option("-b", "--block-by-header-hash", help="Look up a block by block header hash", type=str, default="")
def show_cmd(
    rpc_port: int,
    wallet_rpc_port: int,
    state: bool,
    connections: bool,
    exit_node: bool,
    add_connection: str,
    remove_connection: str,
    block_header_hash_by_height: str,
    block_by_header_hash: str,
) -> None:
    """CLI wrapper: parse the options above and drive show_async to completion.

    NOTE(review): wallet_rpc_port is accepted but never forwarded to
    show_async -- confirm whether the option is vestigial or missing wiring.
    """
    import asyncio
    asyncio.run(
        show_async(
            rpc_port,
            state,
            connections,
            exit_node,
            add_connection,
            remove_connection,
            block_header_hash_by_height,
            block_by_header_hash,
        )
    )
| 46.304478 | 119 | 0.537906 |
d8edba85e575ced3f05e1350a4a75acad981a28d | 4,292 | py | Python | river/dummy.py | brcharron/creme | 25290780f6bba0eb030215194e81b120d0219389 | [
"BSD-3-Clause"
] | 1 | 2020-12-04T18:56:19.000Z | 2020-12-04T18:56:19.000Z | river/dummy.py | brcharron/creme | 25290780f6bba0eb030215194e81b120d0219389 | [
"BSD-3-Clause"
] | null | null | null | river/dummy.py | brcharron/creme | 25290780f6bba0eb030215194e81b120d0219389 | [
"BSD-3-Clause"
] | null | null | null | """Dummy estimators.
This module is here for testing purposes, as well as providing baseline performances.
"""
import collections
from river import base
from river import stats
__all__ = ['NoChangeClassifier', 'PriorClassifier', 'StatisticRegressor']
class NoChangeClassifier(base.Classifier):
    """Dummy classifier that always predicts the most recently seen class.
    `predict_one` returns the last label passed to `learn_one`, while
    `predict_proba_one` puts probability 1 on that label and 0 on every other
    label observed so far.
    Attributes
    ----------
    last_class
        The last class seen.
    classes
        The set of classes seen.
    Examples
    --------
    Taken from example 2.1 from [this page](https://www.cms.waikato.ac.nz/~abifet/book/chapter_2.html).
    >>> import pprint
    >>> from river import dummy
    >>> sentences = [
    ...     ('glad happy glad', '+'),
    ...     ('glad glad joyful', '+'),
    ...     ('glad pleasant', '+'),
    ...     ('miserable sad glad', '−')
    ... ]
    >>> model = dummy.NoChangeClassifier()
    >>> for sentence, label in sentences:
    ...     model = model.learn_one(sentence, label)
    >>> new_sentence = 'glad sad miserable pleasant glad'
    >>> model.predict_one(new_sentence)
    '−'
    >>> pprint.pprint(model.predict_proba_one(new_sentence))
    {'+': 0, '−': 1}
    """
    def __init__(self):
        # No observations yet: no known classes, no last label.
        self.last_class = None
        self.classes = set()
    @property
    def _multiclass(self):
        return True
    def learn_one(self, x, y):
        # The features are ignored entirely; only the label is recorded.
        self.classes.add(y)
        self.last_class = y
        return self
    def predict_one(self, x):
        return self.last_class
    def predict_proba_one(self, x):
        # Start every known class at 0, then put all the mass on the most
        # recently observed label.
        probas = dict.fromkeys(self.classes, 0)
        probas[self.last_class] = 1
        return probas
class PriorClassifier(base.Classifier):
    """Dummy classifier that predicts from the empirical class distribution.
    `predict_one` outputs the most common class seen so far, while
    `predict_proba_one` returns the class counts normalized by the total
    number of observations.
    Attributes
    ----------
    counts : collections.Counter
        Class counts.
    n : int
        Total number of seen instances.
    Examples
    --------
    Taken from example 2.1 from [this page](https://www.cms.waikato.ac.nz/~abifet/book/chapter_2.html)
    >>> from river import dummy
    >>> sentences = [
    ...     ('glad happy glad', '+'),
    ...     ('glad glad joyful', '+'),
    ...     ('glad pleasant', '+'),
    ...     ('miserable sad glad', '−')
    ... ]
    >>> model = dummy.PriorClassifier()
    >>> for sentence, label in sentences:
    ...     model = model.learn_one(sentence, label)
    >>> new_sentence = 'glad sad miserable pleasant glad'
    >>> model.predict_one(new_sentence)
    '+'
    >>> model.predict_proba_one(new_sentence)
    {'+': 0.75, '−': 0.25}
    """
    def __init__(self):
        self.counts = collections.Counter()
        self.n = 0
    @property
    def _multiclass(self):
        return True
    def learn_one(self, x, y):
        # Features are ignored; only the label frequency is tracked.
        self.counts[y] += 1
        self.n += 1
        return self
    def predict_proba_one(self, x):
        # Normalize raw counts into relative frequencies.
        total = self.n
        return {label: count / total for label, count in self.counts.items()}
class StatisticRegressor(base.Regressor):
    """Dummy regressor that uses a univariate statistic to make predictions.
    Parameters
    ----------
    statistic
        The running univariate statistic (e.g. a mean) that is updated with
        each target value and queried for every prediction.
    Examples
    --------
    >>> from pprint import pprint
    >>> from river import dummy
    >>> from river import stats
    >>> sentences = [
    ...     ('glad happy glad', 3),
    ...     ('glad glad joyful', 3),
    ...     ('glad pleasant', 2),
    ...     ('miserable sad glad', -3)
    ... ]
    >>> model = dummy.StatisticRegressor(stats.Mean())
    >>> for sentence, score in sentences:
    ...     model = model.learn_one(sentence, score)
    >>> new_sentence = 'glad sad miserable pleasant glad'
    >>> model.predict_one(new_sentence)
    1.25
    """
    def __init__(self, statistic: stats.Univariate):
        self.statistic = statistic
    @classmethod
    def _default_params(cls):
        # Default used by the test/clone machinery when no statistic is given.
        return {'statistic': stats.Mean()}
    def learn_one(self, x, y):
        # Features are ignored; only the target feeds the statistic.
        self.statistic.update(y)
        return self
    def predict_one(self, x):
        # Same prediction for every input: the current statistic value.
        return self.statistic.get()
| 23.712707 | 103 | 0.595527 |
9ce07efd66c173ebb5f038c2b930ae83e69ca9f9 | 1,539 | py | Python | cirq-rigetti/cirq_rigetti/_qcs_api_client_decorator.py | LLcat1217/Cirq | b88069f7b01457e592ad69d6b413642ef11a56b8 | [
"Apache-2.0"
] | 1 | 2021-04-29T15:30:32.000Z | 2021-04-29T15:30:32.000Z | cirq-rigetti/cirq_rigetti/_qcs_api_client_decorator.py | bradyb/Cirq | 610b0d4ea3a7862169610797266734c844ddcc1f | [
"Apache-2.0"
] | 4 | 2022-01-16T14:12:15.000Z | 2022-02-24T03:58:46.000Z | cirq-rigetti/cirq_rigetti/_qcs_api_client_decorator.py | bradyb/Cirq | 610b0d4ea3a7862169610797266734c844ddcc1f | [
"Apache-2.0"
] | 2 | 2021-09-22T11:16:46.000Z | 2021-09-23T12:55:22.000Z | # Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from qcs_api_client.client import build_sync_client
def _provide_default_client(function):
"""A decorator that will initialize an `httpx.Client` and pass
it to the wrapped function as a kwarg if not already present. This
eases provision of a default `httpx.Client` with Rigetti
QCS configuration and authentication. If the decorator initializes a
default client, it will invoke the wrapped function from within the
`httpx.Client` context.
Args:
function: The decorated function.
Returns:
The `function` wrapped with a default `client`.
"""
@functools.wraps(function)
def wrapper(*args, **kwargs):
if 'client' in kwargs:
return function(*args, **kwargs)
with build_sync_client() as client: # coverage: ignore
# coverage: ignore
kwargs['client'] = client
return function(*args, **kwargs)
return wrapper
| 34.2 | 74 | 0.709552 |
3882233212d020bf9f97dab7bbe223d9a58900b0 | 1,103 | py | Python | main.py | mayan5/pythonchallenge | 36b6578a523a4e89f0801a1a62227a44b1e2b89b | [
"RSA-MD"
] | null | null | null | main.py | mayan5/pythonchallenge | 36b6578a523a4e89f0801a1a62227a44b1e2b89b | [
"RSA-MD"
] | null | null | null | main.py | mayan5/pythonchallenge | 36b6578a523a4e89f0801a1a62227a44b1e2b89b | [
"RSA-MD"
] | null | null | null | import os
import csv
# Path to the input file (os is imported at the very top of the file).
polling = os.path.join("election_data.csv")
# Total number of ballots cast. Fixed: this was 0.00 (a float), which made
# the summary print e.g. "Total Votes: 17.0" instead of an integer count.
total_votes = 0
# Maps candidate name -> number of votes received.
candidates = {}
# Running winner: the candidate with the highest vote share seen so far.
winner = ""
winner_percent = 0
with open(polling) as csvfile:
    csvreader = csv.reader(csvfile, delimiter=",")
    # Skip the header row (omit this step if there is no header).
    csv_header = next(csvreader)
    for row in csvreader:
        total_votes += 1
        vote = row[2]  # the third column holds the candidate's name
        if vote in candidates:
            candidates[vote] += 1
        else:
            candidates[vote] = 1
print(f'Election Results')
print(f'--------------------')
print(f'Total Votes: {total_votes}')
print(f'--------------------')
for candidate in candidates:
    votes = candidates[candidate]
    percent = votes/total_votes
    print(f'{candidate}: {percent:.2%} ({votes})')
    if percent > winner_percent:
        winner = candidate
        winner_percent = percent
print(f'--------------------')
print(f'Winner: {winner}')
print(f'--------------------')
c2a002ab2ebf400a82a69029a32f86d9c4bab8ff | 1,083 | py | Python | stage/configuration/test_field_merger_processor.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | [
"Apache-2.0"
] | null | null | null | stage/configuration/test_field_merger_processor.py | Sentienz/datacollector-tests | ca27988351dc3366488098b5db6c85a8be2f7b85 | [
"Apache-2.0"
] | 1 | 2019-04-24T11:06:38.000Z | 2019-04-24T11:06:38.000Z | stage/configuration/test_field_merger_processor.py | anubandhan/datacollector-tests | 301c024c66d68353735256b262b681dd05ba16cc | [
"Apache-2.0"
] | 2 | 2019-05-24T06:34:37.000Z | 2020-03-30T11:48:18.000Z | import pytest
from streamsets.testframework.decorators import stub
@stub
def test_fields_to_merge(sdc_builder, sdc_executor):
    """Placeholder for the 'fields to merge' configuration test.
    @stub marks it as not yet implemented (presumably skipped by the
    framework -- confirm in streamsets.testframework.decorators).
    """
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
                                              {'on_record_error': 'STOP_PIPELINE'},
                                              {'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
    """Placeholder: exercise each 'on record error' policy (see @stub)."""
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'overwrite_fields': False}, {'overwrite_fields': True}])
def test_overwrite_fields(sdc_builder, sdc_executor, stage_attributes):
    """Placeholder: cover both values of 'overwrite fields' (see @stub)."""
    pass
@stub
def test_preconditions(sdc_builder, sdc_executor):
    """Placeholder for the 'preconditions' configuration test (see @stub)."""
    pass
@stub
def test_required_fields(sdc_builder, sdc_executor):
    """Placeholder for the 'required fields' configuration test (see @stub)."""
    pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'source_field_does_not_exist': 'CONTINUE'},
                                              {'source_field_does_not_exist': 'TO_ERROR'}])
def test_source_field_does_not_exist(sdc_builder, sdc_executor, stage_attributes):
    """Placeholder: cover both missing-source-field policies (see @stub)."""
    pass
| 26.414634 | 103 | 0.680517 |
a2ff2a928370aaec7555382b9d77908b5b490c06 | 701 | py | Python | Teste.py | PedroHAlvesS/Ativar-Eventos-Chase--gc- | 207ac044097aa0f0fb4981bb274916b2b9bc5fb4 | [
"MIT"
] | null | null | null | Teste.py | PedroHAlvesS/Ativar-Eventos-Chase--gc- | 207ac044097aa0f0fb4981bb274916b2b9bc5fb4 | [
"MIT"
] | null | null | null | Teste.py | PedroHAlvesS/Ativar-Eventos-Chase--gc- | 207ac044097aa0f0fb4981bb274916b2b9bc5fb4 | [
"MIT"
] | null | null | null | import arquivo
# Event files (scripts/data that make up the "Eclipse" event).
eclipse = ["InitEclipseTimeEvent.lua", "EventMonster.stg", "InitClientContents.lua", "InitEclipseCollect.lua", "InitEclipsePlot.lua"]
arqlista = [eclipse]
# Verify that every event's file list exists before doing anything else.
for nome in arqlista:
    arquivo.arquivoExiste(nome)
# Activate the event:
# event selection menu -- loops until option "1" (Eclipse) is chosen.
while True:
    menu = input("Deseja ativar qual evento?\n1- Eclipse\n")
    if ('1' in menu):
        break
# Ask for the event start/end dates as "year, month, day, hour, minute".
inicio = input("Informe a data de início do evento (--ano, mes, dia, hora, minuto). Ex.: 2020, 12....\n")
fim = input("Informe a data do fim do evento (--ano, mes, dia, hora, minuto). Ex.: 2020, 12....\n")
# Wrap the dates in braces, e.g. "{ 2020, 12, 1, 0, 0 }" (presumably the Lua
# table syntax the event files expect -- confirm in arquivo.eclipseAtivar).
inicio = '{ ' + inicio + ' }'
fim = '{ ' + fim + ' }'
arquivo.eclipseAtivar(arqlista[0], inicio, fim)
| 38.944444 | 133 | 0.666191 |
87aa745ca79020f6661bdf1e8ed9e1a0a5d1cb40 | 6,693 | py | Python | new_system/send_recieve.py | TakenokoEmpire/mypj | fcc56c1113c88d6611b60d0855d6c79df7add7fb | [
"MIT"
] | null | null | null | new_system/send_recieve.py | TakenokoEmpire/mypj | fcc56c1113c88d6611b60d0855d6c79df7add7fb | [
"MIT"
] | null | null | null | new_system/send_recieve.py | TakenokoEmpire/mypj | fcc56c1113c88d6611b60d0855d6c79df7add7fb | [
"MIT"
] | null | null | null | import requests
from typing import Dict, Optional
# Default connection values (kept for reference; the live defaults are the
# keyword defaults of SendReceive.__init__ below).
# URL = "https://damp-earth-70561.herokuapp.com"
# player_id_D = "d2b8e778-20f3-4744-920b-6eb67eacc901"
# player_id_D2 = "6a9bbf53-df31-4e02-a585-e2173085606e"
# headers={"Content-Type": "application/json"}
# Single module-level session shared by every SendReceive instance.
session = requests.Session()
class SendReceive():
    """HTTP client for a two-player number-guessing game server.
    Wraps the REST endpoints under ``url`` (rooms, hidden numbers, guesses)
    using the module-level ``requests`` session.
    """
    def __init__(
        self,
        url: str = "https://damp-earth-70561.herokuapp.com",
        player_id: str = "d2b8e778-20f3-4744-920b-6eb67eacc901",
        player_name: str = "D",
        room_id: Optional[int] = None,
        headers: Dict[str, str] = {"Content-Type": "application/json"}) -> None:
        # NOTE(review): the mutable dict default for ``headers`` is shared
        # across instances; it is only read here, but confirm nothing
        # mutates it.
        self.url = url
        self.player_name = player_name
        # Any player name other than "D" is mapped to the hard-coded id of
        # the second player ("D2").
        if self.player_name == "D":
            self.player_id = player_id
        else:
            self.player_id = "6a9bbf53-df31-4e02-a585-e2173085606e"
        self.room_id = room_id
        self.headers = headers
    # Fetch information about every room. The response is currently
    # discarded (debug prints are commented out), so this returns None.
    def get_all_room(self):
        url_get_all_room = self.url + "/rooms"
        result_all_room = session.get(url_get_all_room)
        # print(result_all_room.status_code)
        # print(result_all_room.json())
    # Register this player in a match room and remember the room id the
    # server assigns.
    def enter_room(self):
        url_enter_romm = self.url + "/rooms"
        post_data_enter = {
            "player_id": self.player_id,
            "room_id": self.room_id
        }
        result_enter = session.post(
            url_enter_romm, headers=self.headers, json=post_data_enter)
        # print(result_enter.status_code)
        # print(result_enter.json())
        self.room_id = result_enter.json()["id"]
    # Fetch information about the room this client is registered in.
    # Returns None (the debug prints are commented out).
    def get_room(self):
        url_get_room = self.url + "/rooms/" + str(self.room_id)
        result_room = session.get(url_get_room)
        # print(result_room.status_code)
        # print(result_room.json())
    # Fetch the match-state table (current turn, hit&blow history, and the
    # win/lose verdict) as the decoded JSON response.
    def get_table(self):
        url_get_table = self.url + "/rooms/" + \
            str(self.room_id) + "/players/" + self.player_name + "/table"
        result_table = session.get(url_get_table)
        # print(result_table.status_code)
        # print(result_table.json())
        return result_table.json()
    # Register the hidden number the opponent must guess; returns the
    # decoded JSON response.
    def post_hidden(self, ans: str):
        url_post_hidden = self.url + "/rooms/" + \
            str(self.room_id) + "/players/" + self.player_name + "/hidden"
        post_data_hidden = {
            "player_id": self.player_id,
            "hidden_number": ans
        }
        result_hidden = session.post(
            url_post_hidden, headers=self.headers, json=post_data_hidden)
        # print(result_hidden.status_code)
        # print(result_hidden.json())
        return result_hidden.json()
    # Submit a guess at the opponent's number; returns the decoded JSON
    # response.
    def post_guess(self, guess: str):
        url_post_guess = self.url + "/rooms/" + \
            str(self.room_id) + "/players/" + \
            self.player_name + "/table/guesses"
        post_data_guess = {
            "player_id": self.player_id,
            "guess": guess
        }
        result_guess = session.post(
            url_post_guess, headers=self.headers, json=post_data_guess)
        # print(result_guess.status_code)
        # print(result_guess.json())
        return result_guess.json()
    # def fugaku(self):
"""試し
drun = SendReceive(room_id=4001)
drun.enter_room()
200
{'id': 4001, 'state': 1, 'player1': 'D', 'player2': None}
d2run = SendReceive(room_id=4001, player_name="D2")
d2run.enter_room()
200
{'id': 4001, 'state': 2, 'player1': 'D', 'player2': 'D2'}
drun.get_room()
200
{'id': 4001, 'state': 2, 'player1': 'D', 'player2': 'D2'}
drun.get_table()
200
{'room_id': 4001, 'state': 2, 'now_player': None, 'table': None, 'opponent_table': None, 'winner':
None, 'game_end_count': None}
drun.post_hidden(ans="12345")
200
{'selecting': True}
d2run.post_hidden(ans="02abd")
200
{'selecting': False}
drun.get_table()
200
{'room_id': 4001, 'state': 2, 'now_player': 'D', 'table': [], 'opponent_table': [], 'winner': None, 'game_end_count': None}
drun.post_guess("12345")
200
{'room_id': 4001, 'now_player': 'D2', 'guesses': ['12345']}
drun.get_table()
200
{'room_id': 4001, 'state': 2, 'now_player': 'D2', 'table': [{'guess': '12345', 'hit': 1, 'blow': 0}], 'opponent_table': [], 'winner': None, 'game_end_count': None}
d2run.post_guess("123ab")
200
{'room_id': 4001, 'now_player': 'D', 'guesses': ['123ab']}
drun.get_table()
200
, 'opponent_table': [{'guess': '123ab', 'hit': 3, 'blow': 0}], 'winner': None, 'game_end_count': 1}
drun.post_guess("12345")
200
{'room_id': 4001, 'now_player': 'D2', 'guesses': ['12345', '12345']}
drun.get_table()
200
{'room_id': 4001, 'state': 2, 'now_player': 'D2', 'table': [{'guess': '12345', 'hit': 1, 'blow': 0}, {'guess': '12345', 'hit': 1, 'blow': 0}], 'opponent_table': [{'guess': '123ab', 'hit': 3, 'blow': 0}], 'winner': None, 'game_end_count': None}
d2run.post_guess("12345")
200
{'room_id': 4001, 'now_player': 'D', 'guesses': ['123ab', '12345']}
drun.get_table()
200
{'room_id': 4001, 'state': 3, 'now_player': 'D', 'table': [{'guess': '12345', 'hit': 1, 'blow': 0}, {'guess': '12345', 'hit': 1, 'blow': 0}], 'opponent_table': [{'guess': '123ab', 'hit': 3, 'blow':
0}, {'guess': '12345', 'hit': 5, 'blow': 0}], 'winner': 'D2', 'game_end_count': 2}
d2run.get_table()
200
{'room_id': 4001, 'state': 3, 'now_player': 'D', 'table': [{'guess': '123ab', 'hit': 3, 'blow': 0},
{'guess': '12345', 'hit': 5, 'blow': 0}], 'opponent_table': [{'guess': '12345', 'hit': 1, 'blow': 0}, {'guess': '12345', 'hit': 1, 'blow': 0}], 'winner': 'D2', 'game_end_count': 2}
drun.post_guess("11111")
400
{'detail': 'format error 11111'}
drun.post_guess("02abd")
200
{'room_id': 4001, 'now_player': 'D2', 'guesses': ['12345', '12345', '02abd']}
drun.get_table()
200
{'room_id': 4001, 'state': 3, 'now_player': 'D2', 'table': [{'guess': '12345', 'hit': 1, 'blow': 0}, {'guess': '12345', 'hit': 1, 'blow': 0}, {'guess': '02abd', 'hit': 5, 'blow': 0}], 'opponent_table': [{'guess': '123ab', 'hit': 3, 'blow': 0}, {'guess': '12345', 'hit': 5, 'blow': 0}], 'winner': None, 'game_end_count': None}
drun.post_hidden("02abd")
400
{'detail': 'you can not select hidden'}
drun.get_table()
200
{'room_id': 4001, 'state': 3, 'now_player': 'D2', 'table': [{'guess': '12345', 'hit': 1, 'blow': 0}, {'guess': '12345', 'hit': 1, 'blow': 0}, {'guess': '02abd', 'hit': 5, 'blow': 0}], 'opponent_table': [{'guess': '123ab', 'hit': 3, 'blow': 0}, {'guess': '12345', 'hit': 5, 'blow': 0}], 'winner': None, 'game_end_count': None}
"""
| 38.912791 | 329 | 0.582101 |
7b5388d8dbfd57802b9172b7df70b27b4902b499 | 6,803 | py | Python | src/datadog_api_client/v1/model/slo_correction_update_request.py | rchenzheng/datadog-api-client-python | 2e86ac098c6f0c7fdd90ed218224587c0f8eafef | [
"Apache-2.0"
] | null | null | null | src/datadog_api_client/v1/model/slo_correction_update_request.py | rchenzheng/datadog-api-client-python | 2e86ac098c6f0c7fdd90ed218224587c0f8eafef | [
"Apache-2.0"
] | null | null | null | src/datadog_api_client/v1/model/slo_correction_update_request.py | rchenzheng/datadog-api-client-python | 2e86ac098c6f0c7fdd90ed218224587c0f8eafef | [
"Apache-2.0"
] | null | null | null | # Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
import re # noqa: F401
import sys # noqa: F401
from datadog_api_client.v1.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    # Deferred import: resolving SLOCorrectionUpdateData at first use rather
    # than at module load time avoids circular imports between the generated
    # model modules.  The name is published via globals() so openapi_types()
    # can reference it as if it had been imported normally.
    from datadog_api_client.v1.model.slo_correction_update_data import SLOCorrectionUpdateData

    globals()["SLOCorrectionUpdateData"] = SLOCorrectionUpdateData
class SLOCorrectionUpdateRequest(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model has no enum-constrained attributes and no validation rules.
    allowed_values = {}

    validations = {}

    # None: additional (undeclared) properties are rejected for this model.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # NOTE: declared without `self` — cached_property in the generated
        # model_utils evaluates it lazily on first class-level access.
        lazy_import()
        return {
            "data": (SLOCorrectionUpdateData,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No polymorphic subtypes for this model.
        return None

    # Maps pythonic attribute names to the JSON keys sent over the wire.
    attribute_map = {
        "data": "data",  # noqa: E501
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    # Internal bookkeeping attributes that must never be treated as
    # OpenAPI model properties.
    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_spec_property_naming",
            "_path_to_item",
            "_configuration",
            "_visited_composed_classes",
        ]
    )

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """SLOCorrectionUpdateRequest - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            data (SLOCorrectionUpdateData): [optional] # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the rest
        # of kwargs as model properties.
        _check_type = kwargs.pop("_check_type", True)
        _spec_property_naming = kwargs.pop("_spec_property_naming", False)
        _path_to_item = kwargs.pop("_path_to_item", ())
        _configuration = kwargs.pop("_configuration", None)
        _visited_composed_classes = kwargs.pop("_visited_composed_classes", ())

        # Generated models accept keyword arguments only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments."
                % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Remaining kwargs are the model's properties; unknown keys are
        # either discarded (when the configuration says so) or set as-is.
        for var_name, var_value in kwargs.items():
            if (
                var_name not in self.attribute_map
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and self.additional_properties_type is None
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)
| 39.783626 | 108 | 0.591798 |
efa34bedb054cfaa02db7bc266ac6e0eecd4dc49 | 1,355 | py | Python | app/utc_time.py | coder8jedi/ReminderBot | f493b3e6debc8fb4771ec84c1b40dc056cbd8500 | [
"Apache-2.0"
] | 3 | 2021-08-03T07:05:09.000Z | 2022-03-10T09:35:44.000Z | app/utc_time.py | coder8jedi/ReminderBot | f493b3e6debc8fb4771ec84c1b40dc056cbd8500 | [
"Apache-2.0"
] | null | null | null | app/utc_time.py | coder8jedi/ReminderBot | f493b3e6debc8fb4771ec84c1b40dc056cbd8500 | [
"Apache-2.0"
] | null | null | null | import math
def time_operation(local_time: str, utc_code: str, monday: bool, tuesday: bool, wednesday: bool, thursday: bool,
                   friday: bool, saturday: bool, sunday: bool):
    """Convert a local "HH:MM" time plus weekday flags into UTC.

    ``utc_code`` is an offset string shaped like "+09:00" or "-05:30":
    one sign character, two hour digits, a separator, two minute digits.

    Returns a 3-tuple of:
      * the UTC time as "HH:MM",
      * the local weekday flags joined as "1|0|0|0|0|0|0" (Mon..Sun),
      * the UTC weekday flags in the same format, rotated by one day
        whenever the conversion crosses midnight.
    """
    day_flags = [monday, tuesday, wednesday, thursday, friday, saturday, sunday]
    local_days = '|'.join(str(int(flag)) for flag in day_flags)

    local_minutes = int(local_time[3:5]) + 60 * int(local_time[0:2])
    offset_minutes = int(utc_code[4:6]) + 60 * int(utc_code[1:3])
    if utc_code[0] == '+':
        total = local_minutes - offset_minutes
    else:
        total = local_minutes + offset_minutes

    # divmod floors toward negative infinity, matching math.floor on the
    # hours and leaving a minute remainder in [0, 60).
    hour, minute = divmod(total, 60)

    if hour < 0:
        # UTC falls on the previous day: rotate the flags one day earlier.
        hour += 24
        day_flags = day_flags[1:] + day_flags[:1]
    elif hour > 23:
        # UTC falls on the next day: rotate the flags one day later.
        hour -= 24
        day_flags = day_flags[-1:] + day_flags[:-1]

    utc_time = '{:02d}:{:02d}'.format(hour, minute)
    utc_days = '|'.join(str(int(flag)) for flag in day_flags)
    return utc_time, local_days, utc_days
| 30.111111 | 113 | 0.508487 |
1e4e008b923844a3f1a55a5b12ebf9939d021a6f | 671 | py | Python | examples/math/general_purpose.py | gugarosa/opfython | 19b467a92d85c7c26d231efec770645096827b4e | [
"Apache-2.0"
] | 26 | 2018-04-24T20:16:18.000Z | 2022-03-09T14:03:28.000Z | examples/math/general_purpose.py | gugarosa/opfython | 19b467a92d85c7c26d231efec770645096827b4e | [
"Apache-2.0"
] | 4 | 2020-12-26T14:57:18.000Z | 2022-03-30T02:34:18.000Z | examples/math/general_purpose.py | gugarosa/opfython | 19b467a92d85c7c26d231efec770645096827b4e | [
"Apache-2.0"
] | 16 | 2019-05-20T15:41:56.000Z | 2022-03-23T17:59:53.000Z | import numpy as np
import opfython.math.general as g
# Defining array, labels and predictions
# NOTE(review): `array` (6 values) feeds only the normalization demo below,
# while `labels`/`preds` (7 entries each) feed the classification metrics —
# the two groups are independent of each other.
array = np.asarray([1.5, 2, 0.5, 1.25, 1.75, 3])
labels = [0, 0, 0, 1, 1, 1, 2]
preds = [0, 0, 1, 1, 0, 1, 2]
# Normalizing the array
norm_array = g.normalize(array)
print(norm_array)
# Calculating the confusion matrix (rows/columns per opfython's convention)
c_matrix = g.confusion_matrix(labels, preds)
print(c_matrix)
# Calculating OPF-like accuracy over all samples
opf_acc = g.opf_accuracy(labels, preds)
print(opf_acc)
# Calculating OPF-like accuracy per label
opf_acc_per_label = g.opf_accuracy_per_label(labels, preds)
print(opf_acc_per_label)
# Calculating purity measure of the predicted clustering
purity = g.purity(labels, preds)
print(purity)
| 23.137931 | 59 | 0.743666 |
959e6e1bc9dc2eb1a6b57ce00464d2c4815f88c4 | 23,866 | py | Python | models/official/mask_rcnn/mask_rcnn_model.py | lionsky123/tpu | f4468c3b2e9a0214a40ac80a76a9cf5ff237d030 | [
"Apache-2.0"
] | 2 | 2020-08-06T10:27:56.000Z | 2020-08-06T10:36:50.000Z | models/official/mask_rcnn/mask_rcnn_model.py | lionsky123/tpu | f4468c3b2e9a0214a40ac80a76a9cf5ff237d030 | [
"Apache-2.0"
] | null | null | null | models/official/mask_rcnn/mask_rcnn_model.py | lionsky123/tpu | f4468c3b2e9a0214a40ac80a76a9cf5ff237d030 | [
"Apache-2.0"
] | 2 | 2019-07-03T20:53:03.000Z | 2021-09-18T08:18:32.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model defination for the Mask-RCNN Model.
Defines model_fn of Mask-RCNN for TF Estimator. The model_fn includes Mask-RCNN
model architecture, loss function, learning rate schedule, and evaluation
procedure.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import re
import six
import tensorflow as tf
import anchors
import fpn
import heads
import learning_rates
import losses
import postprocess_ops
import resnet
import roi_ops
import spatial_transform_ops
import training_ops
import sys
sys.path.insert(0, 'tpu/models/official/mnasnet')
import mnasnet_models
def create_optimizer(learning_rate, params):
  """Instantiate the optimizer named by ``params['optimizer']``.

  Args:
    learning_rate: scalar tensor or float learning rate.
    params: hyperparameter dict; reads ``optimizer`` and, depending on the
      choice, ``momentum`` and ``lars_weight_decay``.

  Returns:
    A ``tf.train`` (or ``tf.contrib.opt``) optimizer instance.

  Raises:
    ValueError: if ``params['optimizer']`` is not a supported name.
  """
  opt_name = params['optimizer']
  if opt_name == 'momentum':
    return tf.train.MomentumOptimizer(
        learning_rate, momentum=params['momentum'])
  if opt_name == 'adam':
    return tf.train.AdamOptimizer(learning_rate)
  if opt_name == 'adadelta':
    return tf.train.AdadeltaOptimizer(learning_rate)
  if opt_name == 'adagrad':
    return tf.train.AdagradOptimizer(learning_rate)
  if opt_name == 'rmsprop':
    return tf.train.RMSPropOptimizer(
        learning_rate, momentum=params['momentum'])
  if opt_name == 'lars':
    # LARS skips weight decay / trust-ratio scaling for BN and bias vars.
    return tf.contrib.opt.LARSOptimizer(
        learning_rate,
        momentum=params['momentum'],
        weight_decay=params['lars_weight_decay'],
        skip_list=['batch_normalization', 'bias'])
  raise ValueError('Unsupported optimizer type %s.' % params['optimizer'])
def remove_variables(variables, prefix):
  """Filter out low-level backbone variables from the training list.

  Dropping the earliest layers (initial conv + their batch norms) from
  training usually speeds training up and slightly improves test accuracy:
  the low-level filters (edges etc.) transfer as-is and need no fine-tuning
  for detection.

  Args:
    variables: all the variables in training.
    prefix: variable-scope prefix of the backbone (e.g. 'resnet50/').

  Returns:
    The list of variables that remain trainable.
  """
  # Freeze at conv2 based on reference model.
  # Reference: https://github.com/facebookresearch/Detectron/blob/master/detectron/core/config.py#L194 # pylint: disable=line-too-long
  frozen_scopes = [prefix + 'conv2d/', prefix + 'batch_normalization/']
  for idx in range(1, 11):
    frozen_scopes.append(prefix + 'conv2d_{}/'.format(idx))
    frozen_scopes.append(prefix + 'batch_normalization_{}/'.format(idx))

  # Keep a variable only if none of the frozen scope names occurs in it; the
  # trailing '/' prevents 'conv2d_1/' from also matching 'conv2d_11/'.
  return [
      v for v in variables
      if not any(scope in v.name for scope in frozen_scopes)
  ]
def build_model_graph(features, labels, is_training, params):
  """Builds the forward model graph.

  Builds backbone -> FPN -> RPN -> box head (and optionally mask head).
  The returned dict contains training targets when `is_training` is True,
  or post-processed detections otherwise.

  Args:
    features: dict with `images` (NHWC; or transposed when
      `params['transpose_input']` is set — presumably HWCN from the input
      pipeline, TODO confirm), `image_info`, and optionally `source_ids`.
    labels: dict of groundtruth tensors (`gt_boxes`, `gt_classes`,
      `cropped_gt_masks`); only read when `is_training` is True.
    is_training: whether to build the training or the inference graph.
    params: hyperparameter dict.

  Returns:
    model_outputs: dict of output tensors (losses' inputs and targets in
    training mode; detections, and masks if enabled, in inference mode).
  """
  model_outputs = {}
  # Undo the input-pipeline transpose so the graph always sees NHWC.
  if params['transpose_input'] and is_training:
    features['images'] = tf.transpose(features['images'], [3, 0, 1, 2])
  batch_size, image_height, image_width, _ = (
      features['images'].get_shape().as_list())
  # Fill in dummy ids when the dataset provides none (e.g. serving input).
  if 'source_ids' not in features:
    features['source_ids'] = -1 * tf.ones([batch_size], dtype=tf.float32)
  all_anchors = anchors.Anchors(params['min_level'], params['max_level'],
                                params['num_scales'], params['aspect_ratios'],
                                params['anchor_scale'],
                                (image_height, image_width))
  # Backbone: either a ResNet variant or a MnasNet variant, selected by name.
  if 'resnet' in params['backbone']:
    with tf.variable_scope(params['backbone']):
      resnet_fn = resnet.resnet_v1(
          params['backbone'],
          num_batch_norm_group=params['num_batch_norm_group'])
      backbone_feats = resnet_fn(
          features['images'],
          (params['is_training_bn'] and is_training))
  elif 'mnasnet' in params['backbone']:
    with tf.variable_scope(params['backbone']):
      _, endpoints = mnasnet_models.build_mnasnet_base(
          features['images'],
          params['backbone'],
          training=(params['is_training_bn'] and is_training),
          override_params={'use_keras': False})
      # Map MnasNet endpoints to the level->feature dict the FPN expects.
      backbone_feats = {
          2: endpoints['reduction_2'],
          3: endpoints['reduction_3'],
          4: endpoints['reduction_4'],
          5: endpoints['reduction_5'],
      }
  else:
    raise ValueError('Not a valid backbone option: %s' % params['backbone'])
  fpn_feats = fpn.fpn(
      backbone_feats, params['min_level'], params['max_level'])
  model_outputs.update({
      'fpn_features': fpn_feats,
  })
  rpn_score_outputs, rpn_box_outputs = heads.rpn_head(
      fpn_feats,
      params['min_level'], params['max_level'],
      len(params['aspect_ratios'] * params['num_scales']))
  # Proposal budgets differ between train and eval.
  if is_training:
    rpn_pre_nms_topn = params['rpn_pre_nms_topn']
    rpn_post_nms_topn = params['rpn_post_nms_topn']
  else:
    rpn_pre_nms_topn = params['test_rpn_pre_nms_topn']
    rpn_post_nms_topn = params['test_rpn_post_nms_topn']
  rpn_box_scores, rpn_box_rois = roi_ops.multilevel_propose_rois(
      rpn_score_outputs,
      rpn_box_outputs,
      all_anchors,
      features['image_info'],
      rpn_pre_nms_topn,
      rpn_post_nms_topn,
      params['rpn_nms_threshold'],
      params['rpn_min_size'],
      bbox_reg_weights=None,
      use_batched_nms=(not params['use_tpu'] and params['use_batched_nms']))
  rpn_box_rois = tf.to_float(rpn_box_rois)
  if is_training:
    # Proposals are treated as fixed inputs to the second stage; gradients
    # must not flow back into the RPN through them.
    rpn_box_rois = tf.stop_gradient(rpn_box_rois)
    rpn_box_scores = tf.stop_gradient(rpn_box_scores)
  if is_training:
    # Sampling
    box_targets, class_targets, rpn_box_rois, proposal_to_label_map = (
        training_ops.proposal_label_op(
            rpn_box_rois,
            labels['gt_boxes'],
            labels['gt_classes'],
            features['image_info'],
            batch_size_per_im=params['batch_size_per_im'],
            fg_fraction=params['fg_fraction'],
            fg_thresh=params['fg_thresh'],
            bg_thresh_hi=params['bg_thresh_hi'],
            bg_thresh_lo=params['bg_thresh_lo']))
  # Performs multi-level RoIAlign.
  box_roi_features = spatial_transform_ops.multilevel_crop_and_resize(
      fpn_feats, rpn_box_rois, output_size=7)
  class_outputs, box_outputs, _ = heads.box_head(
      box_roi_features, num_classes=params['num_classes'],
      mlp_head_dim=params['fast_rcnn_mlp_head_dim'])
  if not is_training:
    # GPU path uses batched NMS; TPU path uses the TPU-friendly variant.
    if not params['use_tpu'] and params['use_batched_nms']:
      generate_detections_fn = postprocess_ops.generate_detections_gpu
    else:
      generate_detections_fn = postprocess_ops.generate_detections_tpu
    detections = generate_detections_fn(
        class_outputs,
        box_outputs,
        rpn_box_rois,
        features['image_info'],
        params['test_rpn_post_nms_topn'],
        params['test_detections_per_image'],
        params['test_nms'],
        params['bbox_reg_weights'])
    model_outputs.update({
        'num_detections': detections[0],
        'detection_boxes': detections[1],
        'detection_classes': detections[2],
        'detection_scores': detections[3],
    })
  else:
    encoded_box_targets = training_ops.encode_box_targets(
        rpn_box_rois, box_targets, class_targets, params['bbox_reg_weights'])
    model_outputs.update({
        'rpn_score_outputs': rpn_score_outputs,
        'rpn_box_outputs': rpn_box_outputs,
        'class_outputs': class_outputs,
        'box_outputs': box_outputs,
        'class_targets': class_targets,
        'box_targets': encoded_box_targets,
        'box_rois': rpn_box_rois,
    })
  # Faster-RCNN mode.
  if not params['include_mask']:
    return model_outputs
  # Mask sampling
  if not is_training:
    selected_box_rois = model_outputs['detection_boxes']
    class_indices = tf.to_int32(model_outputs['detection_classes'])
  else:
    # Only foreground proposals contribute to the mask loss.
    (selected_class_targets, selected_box_targets, selected_box_rois,
     proposal_to_label_map) = (
         training_ops.select_fg_for_masks(
             class_targets, box_targets, rpn_box_rois,
             proposal_to_label_map,
             max_num_fg=int(
                 params['batch_size_per_im'] * params['fg_fraction'])))
    class_indices = tf.to_int32(selected_class_targets)
  mask_roi_features = spatial_transform_ops.multilevel_crop_and_resize(
      fpn_feats, selected_box_rois, output_size=14)
  mask_outputs = heads.mask_head(
      mask_roi_features,
      class_indices,
      num_classes=params['num_classes'],
      mrcnn_resolution=params['mrcnn_resolution'])
  if is_training:
    mask_targets = training_ops.get_mask_targets(
        selected_box_rois, proposal_to_label_map, selected_box_targets,
        labels['cropped_gt_masks'], params['mrcnn_resolution'])
    model_outputs.update({
        'mask_outputs': mask_outputs,
        'mask_targets': mask_targets,
        'selected_class_targets': selected_class_targets,
    })
  else:
    # Inference emits per-pixel probabilities, not logits.
    model_outputs.update({
        'detection_masks': tf.nn.sigmoid(mask_outputs),
    })
  return model_outputs
def _build_assigment_map(optimizer, prefix=None, skip_variables_regex=None):
  """Generate the assignment map used when loading a checkpoint.

  Collects every global variable under `prefix`, strips the prefix from its
  name, and maps the stripped checkpoint name to the live variable.  The
  optimizer's own slot variables and any name matching
  `skip_variables_regex` are excluded.  (Function name keeps the historical
  'assigment' spelling; callers depend on it.)
  """
  optimizer_var_names = set(var.name for var in optimizer.variables())
  scoped_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=prefix)
  strip = prefix or ''
  assignment_map = {}
  for var in scoped_vars:
    if var.name in optimizer_var_names:
      continue
    var_name = var.name
    # Trim the output index (':0') from the variable name.
    if ':' in var_name:
      var_name = var_name[:var_name.rindex(':')]
    ckpt_name = var_name[len(strip):]
    if skip_variables_regex and re.match(skip_variables_regex, ckpt_name):
      continue
    assignment_map[ckpt_name] = var
  return assignment_map
def _model_fn(features, labels, mode, params, variable_filter_fn=None):
  """Model definition for the Mask-RCNN model based on ResNet.

  Args:
    features: the input image tensor and auxiliary information, such as
      `image_info` and `source_ids`. The image tensor has a shape of
      [batch_size, height, width, 3]. The height and width are fixed and equal.
    labels: the input labels in a dictionary. The labels include score targets
      and box targets which are dense label maps. The labels are generated from
      get_input_fn function in data/dataloader.py
    mode: the mode of TPUEstimator including TRAIN, EVAL, and PREDICT.
    params: the dictionary defines hyperparameters of model. The default
      settings are in default_hparams function in this file.
    variable_filter_fn: the filter function that takes trainable_variables and
      returns the variable list after applying the filter rule.

  Returns:
    tpu_spec: the TPUEstimatorSpec to run training, evaluation, or prediction.
  """
  if mode == tf.estimator.ModeKeys.PREDICT:
    if params['include_groundtruth_in_features'] and ('labels' in features):
      # Groundtruth rides along inside `features` for prediction-time eval.
      labels = features['labels']
    else:
      labels = None
    if 'features' in features:
      features = features['features']
      # Otherwise, it is in export mode, and the features are passed in
      # directly.

  if params['use_bfloat16']:
    with tf.contrib.tpu.bfloat16_scope():
      model_outputs = build_model_graph(
          features, labels, mode == tf.estimator.ModeKeys.TRAIN, params)
      model_outputs.update({
          'source_id': features['source_ids'],
          'image_info': features['image_info'],
      })

      def cast_outputs_to_float(d):
        # Losses and postprocessing expect float32; recursively cast the
        # bfloat16 graph outputs back (outputs may be nested dicts).
        for k, v in sorted(six.iteritems(d)):
          if isinstance(v, dict):
            cast_outputs_to_float(v)
          else:
            d[k] = tf.cast(v, tf.float32)

      cast_outputs_to_float(model_outputs)
  else:
    model_outputs = build_model_graph(
        features, labels, mode == tf.estimator.ModeKeys.TRAIN, params)
    model_outputs.update({
        'source_id': features['source_ids'],
        'image_info': features['image_info'],
    })

  if mode == tf.estimator.ModeKeys.PREDICT and 'orig_images' in features:
    model_outputs['orig_images'] = features['orig_images']

  # First check if it is in PREDICT mode.
  if mode == tf.estimator.ModeKeys.PREDICT:
    predictions = {}
    if labels and params['include_groundtruth_in_features']:
      # Labels can only be embedded in predictions. The prediction cannot
      # output a dictionary as a value.
      predictions.update(labels)
    model_outputs.pop('fpn_features', None)
    predictions.update(model_outputs)
    if params['use_tpu']:
      return tf.contrib.tpu.TPUEstimatorSpec(mode=mode, predictions=predictions)
    return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)

  # Set up training loss and learning rate.
  global_step = tf.train.get_or_create_global_step()
  learning_rate = learning_rates.step_learning_rate_with_linear_warmup(
      global_step,
      params['init_learning_rate'],
      params['warmup_learning_rate'],
      params['warmup_steps'],
      params['learning_rate_levels'],
      params['learning_rate_steps'])
  # score_loss and box_loss are for logging. only total_loss is optimized.
  total_rpn_loss, rpn_score_loss, rpn_box_loss = losses.rpn_loss(
      model_outputs['rpn_score_outputs'], model_outputs['rpn_box_outputs'],
      labels, params)

  (total_fast_rcnn_loss, fast_rcnn_class_loss,
   fast_rcnn_box_loss) = losses.fast_rcnn_loss(
       model_outputs['class_outputs'], model_outputs['box_outputs'],
       model_outputs['class_targets'], model_outputs['box_targets'], params)
  # Only training has the mask loss. Reference: https://github.com/facebookresearch/Detectron/blob/master/detectron/modeling/model_builder.py  # pylint: disable=line-too-long
  if mode == tf.estimator.ModeKeys.TRAIN and params['include_mask']:
    mask_loss = losses.mask_rcnn_loss(
        model_outputs['mask_outputs'], model_outputs['mask_targets'],
        model_outputs['selected_class_targets'], params)
  else:
    mask_loss = 0.
  if variable_filter_fn and ('resnet' in params['backbone']):
    var_list = variable_filter_fn(tf.trainable_variables(),
                                  params['backbone'] + '/')
  else:
    var_list = tf.trainable_variables()
  # Weight decay excludes batch-norm parameters and biases (standard for
  # detection models; matches the Detectron reference implementation).
  l2_regularization_loss = params['l2_weight_decay'] * tf.add_n([
      tf.nn.l2_loss(v)
      for v in var_list
      if 'batch_normalization' not in v.name and 'bias' not in v.name
  ])
  total_loss = (total_rpn_loss + total_fast_rcnn_loss + mask_loss +
                l2_regularization_loss)

  host_call = None
  if mode == tf.estimator.ModeKeys.TRAIN:
    optimizer = create_optimizer(learning_rate, params)
    if params['use_tpu']:
      optimizer = tf.contrib.tpu.CrossShardOptimizer(optimizer)

    scaffold_fn = None
    if params['warm_start_path']:

      def warm_start_scaffold_fn():
        # Restores every non-optimizer variable from the warm-start
        # checkpoint (full-model restore, no scope prefix).
        # BUGFIX: the original log string carried a stray trailing `",`.
        tf.logging.info(
            'model_fn warm start from: %s' % params['warm_start_path'])
        assignment_map = _build_assigment_map(
            optimizer,
            prefix=None,
            skip_variables_regex=params['skip_checkpoint_variables'])
        tf.train.init_from_checkpoint(params['warm_start_path'], assignment_map)
        return tf.train.Scaffold()

      scaffold_fn = warm_start_scaffold_fn

    elif params['checkpoint']:

      def backbone_scaffold_fn():
        """Loads the pretrained backbone through a scaffold function."""
        # Exclude all variables of the optimizer.
        vars_to_load = _build_assigment_map(
            optimizer,
            prefix=params['backbone'] + '/',
            skip_variables_regex=params['skip_checkpoint_variables'])
        # BUGFIX: validate before calling init_from_checkpoint; the original
        # only raised after already attempting the (empty) restore.
        if not vars_to_load:
          raise ValueError('Variables to load is empty.')
        tf.train.init_from_checkpoint(params['checkpoint'], vars_to_load)
        return tf.train.Scaffold()

      scaffold_fn = backbone_scaffold_fn

    # Batch norm requires update_ops to be added as a train_op dependency.
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    grads_and_vars = optimizer.compute_gradients(total_loss, var_list)
    if params['global_gradient_clip_ratio'] > 0:
      # Clips the gradients for training stability.
      # Refer: https://arxiv.org/abs/1211.5063
      with tf.name_scope('clipping'):
        old_grads, variables = zip(*grads_and_vars)
        num_weights = sum(
            g.shape.num_elements() for g in old_grads if g is not None)
        clip_norm = params['global_gradient_clip_ratio'] * math.sqrt(
            num_weights)
        tf.logging.info(
            'Global clip norm set to %g for %d variables with %d elements.' %
            (clip_norm, sum(1 for g in old_grads if g is not None),
             num_weights))
        gradients, _ = tf.clip_by_global_norm(old_grads, clip_norm)
    else:
      gradients, variables = zip(*grads_and_vars)
    grads_and_vars = []
    # Special treatment for biases (beta is named as bias in reference model)
    # Reference: https://github.com/facebookresearch/Detectron/blob/master/detectron/modeling/optimizer.py#L113  # pylint: disable=line-too-long
    for grad, var in zip(gradients, variables):
      if grad is not None and ('beta' in var.name or 'bias' in var.name):
        grad = 2.0 * grad
      grads_and_vars.append((grad, var))

    # BUGFIX: tf.control_dependencies only affects ops *created* inside the
    # context.  The original called apply_gradients() outside and merely
    # aliased the resulting op inside the context, so the batch-norm
    # update_ops never actually ran before the train step.  Creating the
    # apply_gradients op inside the context attaches the dependency.
    with tf.control_dependencies(update_ops):
      train_op = optimizer.apply_gradients(grads_and_vars,
                                           global_step=global_step)

    if params['use_host_call']:
      def host_call_fn(global_step, total_loss, total_rpn_loss, rpn_score_loss,
                       rpn_box_loss, total_fast_rcnn_loss, fast_rcnn_class_loss,
                       fast_rcnn_box_loss, mask_loss, learning_rate):
        """Training host call. Creates scalar summaries for training metrics.

        This function is executed on the CPU and should not directly reference
        any Tensors in the rest of the `model_fn`. To pass Tensors from the
        model to the `metric_fn`, provide as part of the `host_call`. See
        https://www.tensorflow.org/api_docs/python/tf/contrib/tpu/TPUEstimatorSpec
        for more information.

        Arguments should match the list of `Tensor` objects passed as the second
        element in the tuple passed to `host_call`.

        Args:
          global_step: `Tensor with shape `[batch, ]` for the global_step.
          total_loss: `Tensor` with shape `[batch, ]` for the training loss.
          total_rpn_loss: `Tensor` with shape `[batch, ]` for the training RPN
            loss.
          rpn_score_loss: `Tensor` with shape `[batch, ]` for the training RPN
            score loss.
          rpn_box_loss: `Tensor` with shape `[batch, ]` for the training RPN
            box loss.
          total_fast_rcnn_loss: `Tensor` with shape `[batch, ]` for the
            training Mask-RCNN loss.
          fast_rcnn_class_loss: `Tensor` with shape `[batch, ]` for the
            training Mask-RCNN class loss.
          fast_rcnn_box_loss: `Tensor` with shape `[batch, ]` for the
            training Mask-RCNN box loss.
          mask_loss: `Tensor` with shape `[batch, ]` for the training Mask-RCNN
            mask loss.
          learning_rate: `Tensor` with shape `[batch, ]` for the learning_rate.

        Returns:
          List of summary ops to run on the CPU host.
        """
        # Outfeed supports int32 but global_step is expected to be int64.
        global_step = tf.reduce_mean(global_step)
        # Host call fns are executed FLAGS.iterations_per_loop times after one
        # TPU loop is finished, setting max_queue value to the same as number of
        # iterations will make the summary writer only flush the data to storage
        # once per loop.
        with (tf.contrib.summary.create_file_writer(
            params['model_dir'],
            max_queue=params['iterations_per_loop']).as_default()):
          with tf.contrib.summary.always_record_summaries():
            tf.contrib.summary.scalar(
                'total_loss', tf.reduce_mean(total_loss), step=global_step)
            tf.contrib.summary.scalar(
                'total_rpn_loss', tf.reduce_mean(total_rpn_loss),
                step=global_step)
            tf.contrib.summary.scalar(
                'rpn_score_loss', tf.reduce_mean(rpn_score_loss),
                step=global_step)
            tf.contrib.summary.scalar(
                'rpn_box_loss', tf.reduce_mean(rpn_box_loss), step=global_step)
            tf.contrib.summary.scalar(
                'total_fast_rcnn_loss', tf.reduce_mean(total_fast_rcnn_loss),
                step=global_step)
            tf.contrib.summary.scalar(
                'fast_rcnn_class_loss', tf.reduce_mean(fast_rcnn_class_loss),
                step=global_step)
            tf.contrib.summary.scalar(
                'fast_rcnn_box_loss', tf.reduce_mean(fast_rcnn_box_loss),
                step=global_step)
            if params['include_mask']:
              tf.contrib.summary.scalar(
                  'mask_loss', tf.reduce_mean(mask_loss), step=global_step)
            tf.contrib.summary.scalar(
                'learning_rate', tf.reduce_mean(learning_rate),
                step=global_step)

            return tf.contrib.summary.all_summary_ops()

      # To log the loss, current learning rate, and epoch for Tensorboard, the
      # summary op needs to be run on the host CPU via host_call. host_call
      # expects [batch_size, ...] Tensors, thus reshape to introduce a batch
      # dimension. These Tensors are implicitly concatenated to
      # [params['batch_size']].
      global_step_t = tf.reshape(global_step, [1])
      total_loss_t = tf.reshape(total_loss, [1])
      total_rpn_loss_t = tf.reshape(total_rpn_loss, [1])
      rpn_score_loss_t = tf.reshape(rpn_score_loss, [1])
      rpn_box_loss_t = tf.reshape(rpn_box_loss, [1])
      total_fast_rcnn_loss_t = tf.reshape(total_fast_rcnn_loss, [1])
      fast_rcnn_class_loss_t = tf.reshape(fast_rcnn_class_loss, [1])
      fast_rcnn_box_loss_t = tf.reshape(fast_rcnn_box_loss, [1])
      mask_loss_t = tf.reshape(mask_loss, [1])
      learning_rate_t = tf.reshape(learning_rate, [1])
      host_call = (host_call_fn,
                   [global_step_t, total_loss_t, total_rpn_loss_t,
                    rpn_score_loss_t, rpn_box_loss_t, total_fast_rcnn_loss_t,
                    fast_rcnn_class_loss_t, fast_rcnn_box_loss_t,
                    mask_loss_t, learning_rate_t])
  else:
    train_op = None
    scaffold_fn = None

  if params['use_tpu']:
    return tf.contrib.tpu.TPUEstimatorSpec(
        mode=mode,
        loss=total_loss,
        train_op=train_op,
        host_call=host_call,
        scaffold_fn=scaffold_fn)
  return tf.estimator.EstimatorSpec(
      mode=mode, loss=total_loss, train_op=train_op)
def mask_rcnn_model_fn(features, labels, mode, params):
  """Mask-RCNN model function (Estimator entry point).

  Thin wrapper around the shared `_model_fn`: it opens the root variable
  scope with AUTO_REUSE so that repeated instantiations (e.g. the train and
  eval graphs) share the same variables, and forwards all Estimator
  arguments unchanged, passing `remove_variables` as the variable filter.
  """
  with tf.variable_scope('', reuse=tf.AUTO_REUSE):
    estimator_spec = _model_fn(
        features, labels, mode, params,
        variable_filter_fn=remove_variables)
    return estimator_spec
| 40.043624 | 174 | 0.678371 |
c11b07c2b9e6d1b54af5425890f45537e948da5c | 971 | py | Python | setup.py | Daulbaev/adversarial-library | 6f979a511ad78908374cd55855a9e2c5a874be7d | [
"BSD-3-Clause"
] | 55 | 2020-11-25T10:47:48.000Z | 2022-03-21T12:11:31.000Z | setup.py | Daulbaev/adversarial-library | 6f979a511ad78908374cd55855a9e2c5a874be7d | [
"BSD-3-Clause"
] | 4 | 2021-03-10T19:25:31.000Z | 2021-08-06T00:10:49.000Z | setup.py | Daulbaev/adversarial-library | 6f979a511ad78908374cd55855a9e2c5a874be7d | [
"BSD-3-Clause"
] | 8 | 2020-11-26T08:42:04.000Z | 2022-01-13T02:55:47.000Z | import os
# Packaging script for the adversarial-library (adv_lib) package.
# distutils is deprecated (PEP 632) and its `setup` lacks setuptools features;
# import `setup` from setuptools, which this file already depends on.
from setuptools import setup
from setuptools import find_packages

# Absolute path of the directory containing this file (kept for packaging
# helpers, e.g. reading a README for a future long_description).
setup_path = os.path.abspath(os.path.dirname(__file__))

setup(name='adv_lib',
      version='0.1',
      url='https://github.com/jeromerony/adversarial-library',
      maintainer='Jerome Rony',
      maintainer_email='jerome.rony@gmail.com',
      description='Library of various adversarial resources in PyTorch',
      author='Jerome Rony',
      author_email='jerome.rony@gmail.com',
      classifiers=[
          # Fixed: 'Development Status :: 1 - Alpha' is not a valid trove
          # classifier ('1' is Planning); '3 - Alpha' is the valid value
          # matching the intended status, and PyPI rejects unknown classifiers.
          'Development Status :: 3 - Alpha',
          'Intended Audience :: Developers',
          'Intended Audience :: Science/Research',
          'Programming Language :: Python :: 3.8',
          'Topic :: Scientific/Engineering :: Artificial Intelligence',
      ],
      python_requires='>=3.8',
      install_requires=[
          'torch>=1.7.0',
          'torchvision>=0.8.0',
          'tqdm>=4.48.0',
          'visdom>=0.1.8',
      ],
      packages=find_packages())
| 31.322581 | 72 | 0.61586 |
87a9b57f06a293deb0cb08fb00c5a8a908f6d5e0 | 964 | py | Python | tests/hooks/test_configure_app_hook.py | briancappello/flask-unchained | bff296b5c808f5b1db10f7dddb81054600545749 | [
"MIT"
] | 69 | 2018-10-10T01:59:11.000Z | 2022-03-29T17:29:30.000Z | tests/hooks/test_configure_app_hook.py | briancappello/flask-unchained | bff296b5c808f5b1db10f7dddb81054600545749 | [
"MIT"
] | 18 | 2018-11-17T12:42:02.000Z | 2021-05-22T18:45:27.000Z | tests/hooks/test_configure_app_hook.py | briancappello/flask-unchained | bff296b5c808f5b1db10f7dddb81054600545749 | [
"MIT"
] | 7 | 2018-10-12T16:20:25.000Z | 2021-10-06T12:18:21.000Z | import pytest
from flask_unchained.constants import DEV
from flask_unchained.hooks.configure_app_hook import ConfigureAppHook
from flask_unchained.unchained import Unchained
from .._bundles.myapp import MyAppBundle
from .._bundles.empty_bundle import EmptyBundle
from .._bundles.vendor_bundle import VendorBundle
@pytest.fixture
def hook():
    """Provide a ConfigureAppHook bound to a fresh DEV-mode Unchained instance."""
    unchained = Unchained(DEV)
    return ConfigureAppHook(unchained)
class TestConfigureAppHook:
    """Tests for ConfigureAppHook's bundle-config discovery and merging."""

    def test_later_bundle_configs_override_earlier_ones(self, app,
                                                        hook: ConfigureAppHook):
        """Config keys from bundles listed later must shadow earlier values."""
        bundles = [VendorBundle(), EmptyBundle(), MyAppBundle()]
        hook.run_hook(app, bundles)

        # Key defined only by the app bundle.
        assert app.config.APP_KEY == 'app_key'
        # Key set by the vendor bundle and overridden by the (later) app bundle.
        assert app.config.VENDOR_KEY1 == 'app_override'
        # Key set only by the vendor bundle keeps its original value.
        assert app.config.VENDOR_KEY2 == 'vendor_key2'

    def test_the_app_bundle_config_module_is_named_config(self, hook: ConfigureAppHook):
        """The app bundle's settings are read from a module named ``config``."""
        module_names = hook.get_bundle_module_names(MyAppBundle())
        assert module_names == ['config']
| 34.428571 | 88 | 0.735477 |
0adcadd1e2728b1dea0e5cb7fc8f3218b6b66b93 | 61 | py | Python | tests/__init__.py | ibejohn818/sshtun | bdbc8f7de9ec7c54daffaaf0e60e23eb8f24988a | [
"MIT"
] | null | null | null | tests/__init__.py | ibejohn818/sshtun | bdbc8f7de9ec7c54daffaaf0e60e23eb8f24988a | [
"MIT"
] | null | null | null | tests/__init__.py | ibejohn818/sshtun | bdbc8f7de9ec7c54daffaaf0e60e23eb8f24988a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Unit test package for sshtun."""
| 15.25 | 35 | 0.557377 |
6f2ff7b5eb4102a7e855a059c12968c93e93fe87 | 526 | py | Python | core/migrations/0022_auto_20161214_1702.py | rafaelbantu/timtec | 86c51b7440a044704ed33c3e752a6cf6b15ceae3 | [
"BSD-3-Clause"
] | 21 | 2015-09-23T14:07:16.000Z | 2022-02-18T01:35:18.000Z | core/migrations/0022_auto_20161214_1702.py | rafaelbantu/timtec | 86c51b7440a044704ed33c3e752a6cf6b15ceae3 | [
"BSD-3-Clause"
] | 178 | 2016-05-10T16:16:19.000Z | 2021-12-15T20:21:21.000Z | core/migrations/0022_auto_20161214_1702.py | rafaelbantu/timtec | 86c51b7440a044704ed33c3e752a6cf6b15ceae3 | [
"BSD-3-Clause"
] | 18 | 2015-10-23T13:28:17.000Z | 2021-09-22T13:08:28.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    """Alter ``ProfessorMessage.users_that_read`` (related_name/null options).

    NOTE(review): ``null=True`` has no effect on a ManyToManyField -- Django
    ignores it at the database level (warning fields.W340); presumably
    auto-generated, confirm before cleaning up.
    NOTE(review): the file name suggests migration 0022 but the dependency
    references '0020...' -- verify the migration graph is as intended.
    """

    # Must run after the migration that introduced `users_that_read`.
    dependencies = [
        ('core', '0020_professormessage_users_that_read'),
    ]

    operations = [
        migrations.AlterField(
            model_name='professormessage',
            name='users_that_read',
            field=models.ManyToManyField(related_name='read_messages', null=True, to=settings.AUTH_USER_MODEL),
        ),
    ]
| 25.047619 | 111 | 0.671103 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.