blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cd2467a1fdcd3909917783859542b8cf97f59f5b | ef6229d281edecbea3faad37830cb1d452d03e5b | /ucsmsdk/mometa/vm/VmSwitch.py | bcea418e58ba6b99d43c18c8a4e4364eee01b6ce | [
"Apache-2.0"
] | permissive | anoop1984/python_sdk | 0809be78de32350acc40701d6207631322851010 | c4a226bad5e10ad233eda62bc8f6d66a5a82b651 | refs/heads/master | 2020-12-31T00:18:57.415950 | 2016-04-26T17:39:38 | 2016-04-26T17:39:38 | 57,148,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,917 | py | """This module contains the general information for VmSwitch ManagedObject."""
import sys, os
from ...ucsmo import ManagedObject
from ...ucscoremeta import UcsVersion, MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class VmSwitchConsts():
    """Allowed constant values for VmSwitch managed-object properties.

    Each group below mirrors the enumerated values accepted by the
    corresponding entry in VmSwitch.prop_meta.
    """

    # adminState values
    ADMIN_STATE_DISABLE = "disable"
    ADMIN_STATE_ENABLE = "enable"
    # intId sentinel value
    INT_ID_NONE = "none"
    # manager values
    MANAGER_RHEV_M = "rhev-m"
    MANAGER_SCVMM = "scvmm"
    MANAGER_UNMANAGED = "unmanaged"
    MANAGER_VCENTER = "vcenter"
    # own values
    OWN_DISCOVERED = "discovered"
    OWN_MANAGED = "managed"
    # policyOwner values
    POLICY_OWNER_LOCAL = "local"
    POLICY_OWNER_PENDING_POLICY = "pending-policy"
    POLICY_OWNER_POLICY = "policy"
    # vendor values
    VENDOR_MICROSOFT = "microsoft"
    VENDOR_UNDETERMINED = "undetermined"
    VENDOR_VMWARE = "vmware"
class VmSwitch(ManagedObject):
    """This is VmSwitch class."""

    consts = VmSwitchConsts()
    # 'name' is the naming property used to build the rn "switch-[name]".
    naming_props = set([u'name'])

    # Class-level managed-object metadata: XML class id, rn pattern
    # "switch-[name]", minimum version, access mask, required privileges,
    # parent/child class names and the supported verbs.
    mo_meta = MoMeta("VmSwitch", "vmSwitch", "switch-[name]", VersionMeta.Version111j, "InputOutput", 0xfff, [], ["admin", "ls-config", "ls-config-policy", "ls-network", "pn-policy"], [u'extvmmProvider', u'extvmmSwitchSet', u'vmOrg'], [u'extvmmUpLinkPP', u'vmVnicProfInst'], ["Add", "Get", "Remove", "Set"])

    # Per-property metadata keyed by the python attribute name; each
    # MoPropertyMeta carries the XML name, type, version, access, mask,
    # length bounds, validation regex and allowed values.
    prop_meta = {
        "admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version111j, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["disable", "enable"], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version111j, MoPropertyMeta.INTERNAL, 0x4, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version111j, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
        "ext_key": MoPropertyMeta("ext_key", "extKey", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, None, 1, 33, None, [], []),
        "flt_aggr": MoPropertyMeta("flt_aggr", "fltAggr", "ulong", VersionMeta.Version221b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        "id": MoPropertyMeta("id", "id", "string", VersionMeta.Version201m, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""[\-\.:_a-zA-Z0-9]{1,40}""", [], []),
        "int_id": MoPropertyMeta("int_id", "intId", "string", VersionMeta.Version111j, MoPropertyMeta.INTERNAL, None, None, None, None, ["none"], ["0-4294967295"]),
        "key_inst": MoPropertyMeta("key_inst", "keyInst", "ushort", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "manager": MoPropertyMeta("manager", "manager", "string", VersionMeta.Version201m, MoPropertyMeta.READ_WRITE, 0x40, None, None, None, ["rhev-m", "scvmm", "unmanaged", "vcenter"], []),
        "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version111j, MoPropertyMeta.NAMING, 0x80, None, None, r"""[ !#$%&\(\)\*\+,\-\.:;=\?@\[\]_\{\|\}~a-zA-Z0-9]{1,16}""", [], []),
        "own": MoPropertyMeta("own", "own", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, None, None, None, None, ["discovered", "managed"], []),
        "policy_level": MoPropertyMeta("policy_level", "policyLevel", "uint", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "policy_owner": MoPropertyMeta("policy_owner", "policyOwner", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["local", "pending-policy", "policy"], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, 0x200, 0, 256, None, [], []),
        "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version111j, MoPropertyMeta.READ_WRITE, 0x400, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        "uuid": MoPropertyMeta("uuid", "uuid", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, None, None, None, r"""(([0-9a-fA-F]){8}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){4}\-([0-9a-fA-F]){12})|0""", [], []),
    }

    # Maps the XML attribute names to the python attribute names above.
    prop_map = {
        "adminState": "admin_state",
        "childAction": "child_action",
        "descr": "descr",
        "dn": "dn",
        "extKey": "ext_key",
        "fltAggr": "flt_aggr",
        "id": "id",
        "intId": "int_id",
        "keyInst": "key_inst",
        "manager": "manager",
        "name": "name",
        "own": "own",
        "policyLevel": "policy_level",
        "policyOwner": "policy_owner",
        "rn": "rn",
        "sacl": "sacl",
        "status": "status",
        "uuid": "uuid",
        "vendor": "vendor",
    }

    def __init__(self, parent_mo_or_dn, name, **kwargs):
        # 'name' is the required naming property; every other property
        # starts as None and may be populated from **kwargs by the
        # ManagedObject base-class initializer below.
        self._dirty_mask = 0
        self.name = name
        self.admin_state = None
        self.child_action = None
        self.descr = None
        self.ext_key = None
        self.flt_aggr = None
        self.id = None
        self.int_id = None
        self.key_inst = None
        self.manager = None
        self.own = None
        self.policy_level = None
        self.policy_owner = None
        self.sacl = None
        self.status = None
        self.uuid = None
        self.vendor = None
        ManagedObject.__init__(self, "VmSwitch", parent_mo_or_dn, **kwargs)
| [
"test@cisco.com"
] | test@cisco.com |
f4f38c9c5d24372ddfff33125b42134aab81c2e2 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /QcswPnY2cAbrfwuWE_0.py | 7ec3b3b0c8567e7f036275c1cc969cd1b5fc4448 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | """
Create a function that filters out factorials from a list. A factorial is a
number that can be represented in the following manner:
n! = n * (n-1) * (n-2) * ... * 3 * 2 * 1
Recursively, this can be represented as:
n! = n * (n-1)!
### Examples
filter_factorials([1, 2, 3, 4, 5, 6, 7]) ➞ [1, 2, 6]
filter_factorials([1, 4, 120]) ➞ [1, 120]
filter_factorials([8, 9, 10]) ➞ []
### Notes
N/A
"""
def filter_factorials(numbers):
    """Return the elements of *numbers* that are factorials (1, 2, 6, 24, ...).

    Input order and duplicates are preserved.

    Fixes over the original:
    - an empty input returns [] instead of crashing on ``max([])``;
    - factorials are only generated up to ``max(numbers)`` (the original
      computed ``max(numbers)!`` itself, e.g. 120! for an input of [120]);
    - membership tests use a set instead of scanning a list.
    """
    if not numbers:  # guard: max() below raises ValueError on an empty sequence
        return []
    limit = max(numbers)
    facts = {1}
    fact, i = 1, 1
    # Generate 1!, 2!, 3!, ... until the next factorial exceeds the input max.
    while fact <= limit:
        facts.add(fact)
        fact *= i
        i += 1
    return [x for x in numbers if x in facts]
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
97e6d00f92412bbd534ab4503951218c1842d523 | 908b5e9f5246309b45cf14ea0f7f2cc39c3853f1 | /build/vrpn_client_ros/catkin_generated/pkg.installspace.context.pc.py | cab6a546b2ff257f05d4079c96aedd1f54e26bd6 | [] | no_license | crvogt/vicon_ws | 4a2cc0aa2d1403edcf9240b545a77ca9c1e038e8 | ab474b7eb127c12aefdde1d2055cc4cdce0db952 | refs/heads/master | 2021-07-15T11:43:31.987944 | 2018-03-05T17:35:25 | 2018-03-05T17:35:25 | 95,583,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/carson/vicon_ws/install/include;/opt/ros/indigo/include".split(';') if "/home/carson/vicon_ws/install/include;/opt/ros/indigo/include" != "" else []
PROJECT_CATKIN_DEPENDS = "geometry_msgs;tf2_ros".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lvrpn_client_ros;/opt/ros/indigo/lib/libvrpn.a".split(';') if "-lvrpn_client_ros;/opt/ros/indigo/lib/libvrpn.a" != "" else []
PROJECT_NAME = "vrpn_client_ros"
PROJECT_SPACE_DIR = "/home/carson/vicon_ws/install"
PROJECT_VERSION = "0.1.1"
| [
"crvogt26@gmail.com"
] | crvogt26@gmail.com |
fe420ba34f5b3c6a319b46161e88ec9faaf9962f | d41d18d3ea6edd2ec478b500386375a8693f1392 | /plotly/validators/scattergeo/marker/_colorscale.py | 0d3a158256a6b78a1a74b5260a7e1f03ecdff770 | [
"MIT"
] | permissive | miladrux/plotly.py | 38921dd6618650d03be9891d6078e771ffccc99a | dbb79e43e2cc6c5762251537d24bad1dab930fff | refs/heads/master | 2020-03-27T01:46:57.497871 | 2018-08-20T22:37:38 | 2018-08-20T22:37:38 | 145,742,203 | 1 | 0 | MIT | 2018-08-22T17:37:07 | 2018-08-22T17:37:07 | null | UTF-8 | Python | false | false | 512 | py | import _plotly_utils.basevalidators
class ColorscaleValidator(_plotly_utils.basevalidators.ColorscaleValidator):
    """Validator for the ``scattergeo.marker.colorscale`` property.

    Thin wrapper that pins the fixed validator settings (edit type,
    implied edits, role) for this property path; any extra keyword
    arguments are forwarded to the base validator unchanged.
    """

    def __init__(self, plotly_name='colorscale',
                 parent_name='scattergeo.marker', **kwargs):
        parent_init = super(ColorscaleValidator, self).__init__
        parent_init(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type='calc',
            implied_edits={'autocolorscale': False},
            role='style',
            **kwargs
        )
| [
"adam.kulidjian@gmail.com"
] | adam.kulidjian@gmail.com |
14a3817962cd3561dba203d203fc978f818f205a | e8ae11e5017507da59e2e92d423b6a1994490de4 | /env/lib/python2.7/site-packages/azure/mgmt/commerce/models/usage_management_client_enums.py | d21d71ad515dbba392389fc344bfc79a31bdc8fe | [] | no_license | teopeurt/ansible-ubuntu-server | 613d00cea28bc6531acf4a39aeeb9cd0baa2a391 | b5b6127d2ee9723c5088443efe2ffb8ae30cfea7 | refs/heads/master | 2021-06-28T12:49:50.935753 | 2017-07-31T17:34:33 | 2017-07-31T17:34:33 | 98,912,808 | 0 | 1 | null | 2020-07-24T00:05:31 | 2017-07-31T17:32:56 | Makefile | UTF-8 | Python | false | false | 1,056 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class AggregationGranularity(Enum):
    """Allowed aggregation granularity values for usage queries."""

    daily = "Daily"
    hourly = "Hourly"
| [
"me@teopeurt.com"
] | me@teopeurt.com |
d6d7c0f0a9eaba7901aa642d5230cb6d2c6d8f1f | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2692/59018/260917.py | 470f6781ca1279f4085c9ea366a973489def6d5f | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 561 | py | def shipWithinDays(self, weights):
n=len(weights)
left=max(weights)
right=sum(weights)
res=left
while left<=right:
mid=(left+right)//2
count=0
su=0
for i in range(n):
su+=weights[i]
if su>mid:
count+=1
su=weights[i]
count+=1
if count<=D:
res=mid
right=mid-1
else:
left=mid+1
return res
# First input line: the weight list as a bracketed, comma-separated
# literal, e.g. "[1,2,3]" — strip the brackets, split on commas.
info=input()[1:-1].split(',')
List=[int(y) for y in info]
# Second input line: the number of days allowed.
D=int(input())
print(shipWithinDays(List,D))
"1069583789@qq.com"
] | 1069583789@qq.com |
fbc807e7929bbfec16ca58eddeea72496a47c500 | 542808066eb1d9ef9d3d0bb3f24e8390c3f73694 | /lib/googlecloudsdk/third_party/apis/resourceviews/v1beta1/resourceviews_v1beta1_client.py | cb53fb5958a84fa74f961f8539bd2f32c0e0f2ed | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | drincruz/google-cloud-sdk | b5611ed60a4a1e6d3177a2961c76de9de22b2176 | f67580f778aea376c90a250745c5e0f85634a8fd | refs/heads/master | 2021-01-10T12:22:23.292064 | 2015-12-16T06:59:29 | 2015-12-16T06:59:29 | 48,093,093 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 18,642 | py | """Generated client library for resourceviews version v1beta1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from googlecloudsdk.third_party.apitools.base.py import base_api
from googlecloudsdk.third_party.apis.resourceviews.v1beta1 import resourceviews_v1beta1_messages as messages
class ResourceviewsV1beta1(base_api.BaseApiClient):
"""Generated client library for service resourceviews version v1beta1."""
MESSAGES_MODULE = messages
_PACKAGE = u'resourceviews'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only', u'https://www.googleapis.com/auth/compute', u'https://www.googleapis.com/auth/compute.readonly', u'https://www.googleapis.com/auth/ndev.cloudman', u'https://www.googleapis.com/auth/ndev.cloudman.readonly']
_VERSION = u'v1beta1'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = ''
_CLIENT_CLASS_NAME = u'ResourceviewsV1beta1'
_URL_VERSION = u'v1beta1'
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None):
"""Create a new resourceviews handle."""
url = url or u'https://www.googleapis.com/resourceviews/v1beta1/'
super(ResourceviewsV1beta1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers)
self.regionViews = self.RegionViewsService(self)
self.zoneViews = self.ZoneViewsService(self)
class RegionViewsService(base_api.BaseApiService):
"""Service class for the regionViews resource."""
_NAME = u'regionViews'
def __init__(self, client):
super(ResourceviewsV1beta1.RegionViewsService, self).__init__(client)
self._method_configs = {
'Addresources': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.regionViews.addresources',
ordered_params=[u'projectName', u'region', u'resourceViewName'],
path_params=[u'projectName', u'region', u'resourceViewName'],
query_params=[],
relative_path=u'projects/{projectName}/regions/{region}/resourceViews/{resourceViewName}/addResources',
request_field=u'regionViewsAddResourcesRequest',
request_type_name=u'ResourceviewsRegionViewsAddresourcesRequest',
response_type_name=u'ResourceviewsRegionViewsAddresourcesResponse',
supports_download=False,
),
'Delete': base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'resourceviews.regionViews.delete',
ordered_params=[u'projectName', u'region', u'resourceViewName'],
path_params=[u'projectName', u'region', u'resourceViewName'],
query_params=[],
relative_path=u'projects/{projectName}/regions/{region}/resourceViews/{resourceViewName}',
request_field='',
request_type_name=u'ResourceviewsRegionViewsDeleteRequest',
response_type_name=u'ResourceviewsRegionViewsDeleteResponse',
supports_download=False,
),
'Get': base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'resourceviews.regionViews.get',
ordered_params=[u'projectName', u'region', u'resourceViewName'],
path_params=[u'projectName', u'region', u'resourceViewName'],
query_params=[],
relative_path=u'projects/{projectName}/regions/{region}/resourceViews/{resourceViewName}',
request_field='',
request_type_name=u'ResourceviewsRegionViewsGetRequest',
response_type_name=u'ResourceView',
supports_download=False,
),
'Insert': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.regionViews.insert',
ordered_params=[u'projectName', u'region'],
path_params=[u'projectName', u'region'],
query_params=[],
relative_path=u'projects/{projectName}/regions/{region}/resourceViews',
request_field=u'resourceView',
request_type_name=u'ResourceviewsRegionViewsInsertRequest',
response_type_name=u'RegionViewsInsertResponse',
supports_download=False,
),
'List': base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'resourceviews.regionViews.list',
ordered_params=[u'projectName', u'region'],
path_params=[u'projectName', u'region'],
query_params=[u'maxResults', u'pageToken'],
relative_path=u'projects/{projectName}/regions/{region}/resourceViews',
request_field='',
request_type_name=u'ResourceviewsRegionViewsListRequest',
response_type_name=u'RegionViewsListResponse',
supports_download=False,
),
'Listresources': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.regionViews.listresources',
ordered_params=[u'projectName', u'region', u'resourceViewName'],
path_params=[u'projectName', u'region', u'resourceViewName'],
query_params=[u'maxResults', u'pageToken'],
relative_path=u'projects/{projectName}/regions/{region}/resourceViews/{resourceViewName}/resources',
request_field='',
request_type_name=u'ResourceviewsRegionViewsListresourcesRequest',
response_type_name=u'RegionViewsListResourcesResponse',
supports_download=False,
),
'Removeresources': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.regionViews.removeresources',
ordered_params=[u'projectName', u'region', u'resourceViewName'],
path_params=[u'projectName', u'region', u'resourceViewName'],
query_params=[],
relative_path=u'projects/{projectName}/regions/{region}/resourceViews/{resourceViewName}/removeResources',
request_field=u'regionViewsRemoveResourcesRequest',
request_type_name=u'ResourceviewsRegionViewsRemoveresourcesRequest',
response_type_name=u'ResourceviewsRegionViewsRemoveresourcesResponse',
supports_download=False,
),
}
self._upload_configs = {
}
def Addresources(self, request, global_params=None):
"""Add resources to the view.
Args:
request: (ResourceviewsRegionViewsAddresourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceviewsRegionViewsAddresourcesResponse) The response message.
"""
config = self.GetMethodConfig('Addresources')
return self._RunMethod(
config, request, global_params=global_params)
def Delete(self, request, global_params=None):
"""Delete a resource view.
Args:
request: (ResourceviewsRegionViewsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceviewsRegionViewsDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
def Get(self, request, global_params=None):
"""Get the information of a resource view.
Args:
request: (ResourceviewsRegionViewsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceView) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
def Insert(self, request, global_params=None):
"""Create a resource view.
Args:
request: (ResourceviewsRegionViewsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionViewsInsertResponse) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
def List(self, request, global_params=None):
"""List resource views.
Args:
request: (ResourceviewsRegionViewsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionViewsListResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
def Listresources(self, request, global_params=None):
"""List the resources in the view.
Args:
request: (ResourceviewsRegionViewsListresourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionViewsListResourcesResponse) The response message.
"""
config = self.GetMethodConfig('Listresources')
return self._RunMethod(
config, request, global_params=global_params)
def Removeresources(self, request, global_params=None):
"""Remove resources from the view.
Args:
request: (ResourceviewsRegionViewsRemoveresourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceviewsRegionViewsRemoveresourcesResponse) The response message.
"""
config = self.GetMethodConfig('Removeresources')
return self._RunMethod(
config, request, global_params=global_params)
class ZoneViewsService(base_api.BaseApiService):
"""Service class for the zoneViews resource."""
_NAME = u'zoneViews'
def __init__(self, client):
super(ResourceviewsV1beta1.ZoneViewsService, self).__init__(client)
self._method_configs = {
'Addresources': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.zoneViews.addresources',
ordered_params=[u'projectName', u'zone', u'resourceViewName'],
path_params=[u'projectName', u'resourceViewName', u'zone'],
query_params=[],
relative_path=u'projects/{projectName}/zones/{zone}/resourceViews/{resourceViewName}/addResources',
request_field=u'zoneViewsAddResourcesRequest',
request_type_name=u'ResourceviewsZoneViewsAddresourcesRequest',
response_type_name=u'ResourceviewsZoneViewsAddresourcesResponse',
supports_download=False,
),
'Delete': base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'resourceviews.zoneViews.delete',
ordered_params=[u'projectName', u'zone', u'resourceViewName'],
path_params=[u'projectName', u'resourceViewName', u'zone'],
query_params=[],
relative_path=u'projects/{projectName}/zones/{zone}/resourceViews/{resourceViewName}',
request_field='',
request_type_name=u'ResourceviewsZoneViewsDeleteRequest',
response_type_name=u'ResourceviewsZoneViewsDeleteResponse',
supports_download=False,
),
'Get': base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'resourceviews.zoneViews.get',
ordered_params=[u'projectName', u'zone', u'resourceViewName'],
path_params=[u'projectName', u'resourceViewName', u'zone'],
query_params=[],
relative_path=u'projects/{projectName}/zones/{zone}/resourceViews/{resourceViewName}',
request_field='',
request_type_name=u'ResourceviewsZoneViewsGetRequest',
response_type_name=u'ResourceView',
supports_download=False,
),
'Insert': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.zoneViews.insert',
ordered_params=[u'projectName', u'zone'],
path_params=[u'projectName', u'zone'],
query_params=[],
relative_path=u'projects/{projectName}/zones/{zone}/resourceViews',
request_field=u'resourceView',
request_type_name=u'ResourceviewsZoneViewsInsertRequest',
response_type_name=u'ZoneViewsInsertResponse',
supports_download=False,
),
'List': base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'resourceviews.zoneViews.list',
ordered_params=[u'projectName', u'zone'],
path_params=[u'projectName', u'zone'],
query_params=[u'maxResults', u'pageToken'],
relative_path=u'projects/{projectName}/zones/{zone}/resourceViews',
request_field='',
request_type_name=u'ResourceviewsZoneViewsListRequest',
response_type_name=u'ZoneViewsListResponse',
supports_download=False,
),
'Listresources': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.zoneViews.listresources',
ordered_params=[u'projectName', u'zone', u'resourceViewName'],
path_params=[u'projectName', u'resourceViewName', u'zone'],
query_params=[u'maxResults', u'pageToken'],
relative_path=u'projects/{projectName}/zones/{zone}/resourceViews/{resourceViewName}/resources',
request_field='',
request_type_name=u'ResourceviewsZoneViewsListresourcesRequest',
response_type_name=u'ZoneViewsListResourcesResponse',
supports_download=False,
),
'Removeresources': base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'resourceviews.zoneViews.removeresources',
ordered_params=[u'projectName', u'zone', u'resourceViewName'],
path_params=[u'projectName', u'resourceViewName', u'zone'],
query_params=[],
relative_path=u'projects/{projectName}/zones/{zone}/resourceViews/{resourceViewName}/removeResources',
request_field=u'zoneViewsRemoveResourcesRequest',
request_type_name=u'ResourceviewsZoneViewsRemoveresourcesRequest',
response_type_name=u'ResourceviewsZoneViewsRemoveresourcesResponse',
supports_download=False,
),
}
self._upload_configs = {
}
def Addresources(self, request, global_params=None):
"""Add resources to the view.
Args:
request: (ResourceviewsZoneViewsAddresourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceviewsZoneViewsAddresourcesResponse) The response message.
"""
config = self.GetMethodConfig('Addresources')
return self._RunMethod(
config, request, global_params=global_params)
def Delete(self, request, global_params=None):
"""Delete a resource view.
Args:
request: (ResourceviewsZoneViewsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceviewsZoneViewsDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
def Get(self, request, global_params=None):
"""Get the information of a zonal resource view.
Args:
request: (ResourceviewsZoneViewsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceView) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
def Insert(self, request, global_params=None):
"""Create a resource view.
Args:
request: (ResourceviewsZoneViewsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ZoneViewsInsertResponse) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
def List(self, request, global_params=None):
"""List resource views.
Args:
request: (ResourceviewsZoneViewsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ZoneViewsListResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
def Listresources(self, request, global_params=None):
"""List the resources of the resource view.
Args:
request: (ResourceviewsZoneViewsListresourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ZoneViewsListResourcesResponse) The response message.
"""
config = self.GetMethodConfig('Listresources')
return self._RunMethod(
config, request, global_params=global_params)
def Removeresources(self, request, global_params=None):
"""Remove resources from the view.
Args:
request: (ResourceviewsZoneViewsRemoveresourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ResourceviewsZoneViewsRemoveresourcesResponse) The response message.
"""
config = self.GetMethodConfig('Removeresources')
return self._RunMethod(
config, request, global_params=global_params)
| [
"drincruz@gmail.com"
] | drincruz@gmail.com |
7087294a553afa1827fbbbeb4e45da8aad0c0e73 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/G/gallagher/national_charities_1.py | 082757a5bce2d3be2fc77da41446581313c21c7d | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,106 | py | import scraperwiki
from scrapemark import scrape
import urllib
import urllib2
import simplejson
state_urls = ["http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Alabama"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Alaska"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Arizona"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Arkansas"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=California"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Colorado"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Connecticut"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Delaware"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Florida"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Georgia"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Hawaii"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Idaho"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Illinois"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Indiana"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Iowa"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Kansas"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Kentucky"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Louisiana"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Maine"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Maryland"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Massachusetts"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Michigan"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Minnesota"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Mississippi"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Missouri"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Montana"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Nebraska"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Nevada"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+Hampshire"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+Jersey"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+Mexico"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+York"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=North+Carolina"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=North+Dakota"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Ohio"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Oklahoma"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Oregon"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Pennsylvania"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Rhode+Island"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=South+Carolina"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=South+Dakota"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Tennessee"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Texas"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Utah"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Vermont"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Virginia"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Washington"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=West+Virginia"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Wisconsin"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Wyoming"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=District+of+Columbia"]
# Scrape one batch of state listing pages per run (the other batches are kept
# commented out so the scraper can be re-run batch by batch).
#state_pack = [state_urls[0]]
#state_pack = [state_urls[1],state_urls[2],state_urls[3]]
state_pack = [state_urls[4]]
#state_pack = [state_urls[5],state_urls[6],state_urls[7],state_urls[8],state_urls[9]]
#state_pack = [state_urls[10],state_urls[11],state_urls[12],state_urls[13],state_urls[14]]
#state_pack = [state_urls[15],state_urls[16],state_urls[17],state_urls[18],state_urls[19]]
#state_pack = [state_urls[20],state_urls[21],state_urls[22],state_urls[23],state_urls[24]]
#state_pack = [state_urls[25],state_urls[26],state_urls[27],state_urls[28],state_urls[29]]
#state_pack = [state_urls[30],state_urls[31],state_urls[32],state_urls[33],state_urls[34]]
#state_pack = [state_urls[35],state_urls[36],state_urls[37],state_urls[38],state_urls[39]]
#state_pack = [state_urls[40],state_urls[41],state_urls[42],state_urls[43],state_urls[44]]
#state_pack = [state_urls[45],state_urls[46],state_urls[47],state_urls[48],state_urls[49]]
#state_pack = [state_urls[50]]
#print homepage

# For each selected state listing page: collect the detail-page URLs, scrape
# each non-profit's record, geocode its street address, and store the row.
for item in state_pack:
    page = scraperwiki.scrape(item)
    #print item
    # Extract the per-organisation detail URLs from the listing page.
    list_scrape = scrape("""
<script type="text/javascript"></script>
{*
<tr><td width="90%"><a href="{{[list].url}}"><strong></strong></a>
*}
""", page)['list']
    for np_url in list_scrape:
        np_page = scraperwiki.scrape(np_url['url'])
        # Pull name / address / phone / website / description from the detail page.
        np_scrape = scrape("""
<TD width="50%" valign="top" align="left">
{* <B> {{[np].name}}</B>
<br>
<B><br>{{[np].address}}
<br>{{[np].city_state}}<br> Phone Number: {{[np].phone}} </B> *}
{* <br />Visit Website: <a>{{[np].url}}</a>*}
<a border="0" rel="nofollow"> </a>
{* <script></script><script></script>
<br />{{[np].description}}<br /><br /><br />NonProfitList.org <div></div>*}
""", np_page)['np']
        full_address = np_scrape[0]['address'] + " " + np_scrape[0]['city_state']
        #print full_address
        # Geocode the street address through the ArcGIS Online locator (JSON out).
        geocode_url = 'http://tasks.arcgisonline.com/ArcGIS/rest/services/Locators/TA_Streets_US_10/GeocodeServer/findAddressCandidates?Single+Line+Input='+urllib.quote_plus(full_address)+'&outFields=&outSR=&f=json'
        #print geocode_url
        georeq = urllib2.Request(geocode_url)
        geo_response = urllib2.urlopen(georeq)
        geocode = simplejson.loads(geo_response.read())
        #print geocode
        if len(geocode['candidates']):
            # Take the best (first) candidate returned by the locator.
            data_lat = geocode['candidates'][0]['location']['y']
            data_lng = geocode['candidates'][0]['location']['x']
            np_scrape[0]['lat'] = data_lat
            np_scrape[0]['lng'] = data_lng
            #print data_lat
            #print data_lng
        scraperwiki.sqlite.save(unique_keys=['name'], data=np_scrape[0])
# ScraperWiki classic scraper (Python 2): crawls nonprofitlist.org state
# listings, scrapes each non-profit's detail page, geocodes the address via
# the ArcGIS Online locator, and saves the rows into the scraper's SQLite DB.
import scraperwiki
from scrapemark import scrape
import urllib
import urllib2
import simplejson

# Listing page for every US state plus the District of Columbia.
state_urls = ["http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Alabama"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Alaska"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Arizona"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Arkansas"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=California"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Colorado"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Connecticut"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Delaware"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Florida"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Georgia"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Hawaii"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Idaho"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Illinois"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Indiana"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Iowa"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Kansas"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Kentucky"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Louisiana"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Maine"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Maryland"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Massachusetts"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Michigan"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Minnesota"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Mississippi"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Missouri"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Montana"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Nebraska"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Nevada"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+Hampshire"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+Jersey"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+Mexico"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=New+York"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=North+Carolina"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=North+Dakota"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Ohio"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Oklahoma"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Oregon"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Pennsylvania"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Rhode+Island"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=South+Carolina"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=South+Dakota"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Tennessee"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Texas"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Utah"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Vermont"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Virginia"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Washington"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=West+Virginia"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Wisconsin"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=Wyoming"
,"http://www.nonprofitlist.org/cgi-bin/id/city.cgi?city=&state=District+of+Columbia"]

# Scrape one batch of state listing pages per run (the other batches are kept
# commented out so the scraper can be re-run batch by batch).
#state_pack = [state_urls[0]]
#state_pack = [state_urls[1],state_urls[2],state_urls[3]]
state_pack = [state_urls[4]]
#state_pack = [state_urls[5],state_urls[6],state_urls[7],state_urls[8],state_urls[9]]
#state_pack = [state_urls[10],state_urls[11],state_urls[12],state_urls[13],state_urls[14]]
#state_pack = [state_urls[15],state_urls[16],state_urls[17],state_urls[18],state_urls[19]]
#state_pack = [state_urls[20],state_urls[21],state_urls[22],state_urls[23],state_urls[24]]
#state_pack = [state_urls[25],state_urls[26],state_urls[27],state_urls[28],state_urls[29]]
#state_pack = [state_urls[30],state_urls[31],state_urls[32],state_urls[33],state_urls[34]]
#state_pack = [state_urls[35],state_urls[36],state_urls[37],state_urls[38],state_urls[39]]
#state_pack = [state_urls[40],state_urls[41],state_urls[42],state_urls[43],state_urls[44]]
#state_pack = [state_urls[45],state_urls[46],state_urls[47],state_urls[48],state_urls[49]]
#state_pack = [state_urls[50]]
#print homepage

# For each selected state listing page: collect the detail-page URLs, scrape
# each non-profit's record, geocode its street address, and store the row.
for item in state_pack:
    page = scraperwiki.scrape(item)
    #print item
    # Extract the per-organisation detail URLs from the listing page.
    list_scrape = scrape("""
<script type="text/javascript"></script>
{*
<tr><td width="90%"><a href="{{[list].url}}"><strong></strong></a>
*}
""", page)['list']
    for np_url in list_scrape:
        np_page = scraperwiki.scrape(np_url['url'])
        # Pull name / address / phone / website / description from the detail page.
        np_scrape = scrape("""
<TD width="50%" valign="top" align="left">
{* <B> {{[np].name}}</B>
<br>
<B><br>{{[np].address}}
<br>{{[np].city_state}}<br> Phone Number: {{[np].phone}} </B> *}
{* <br />Visit Website: <a>{{[np].url}}</a>*}
<a border="0" rel="nofollow"> </a>
{* <script></script><script></script>
<br />{{[np].description}}<br /><br /><br />NonProfitList.org <div></div>*}
""", np_page)['np']
        full_address = np_scrape[0]['address'] + " " + np_scrape[0]['city_state']
        #print full_address
        # Geocode the street address through the ArcGIS Online locator (JSON out).
        geocode_url = 'http://tasks.arcgisonline.com/ArcGIS/rest/services/Locators/TA_Streets_US_10/GeocodeServer/findAddressCandidates?Single+Line+Input='+urllib.quote_plus(full_address)+'&outFields=&outSR=&f=json'
        #print geocode_url
        georeq = urllib2.Request(geocode_url)
        geo_response = urllib2.urlopen(georeq)
        geocode = simplejson.loads(geo_response.read())
        #print geocode
        if len(geocode['candidates']):
            # Take the best (first) candidate returned by the locator.
            data_lat = geocode['candidates'][0]['location']['y']
            data_lng = geocode['candidates'][0]['location']['x']
            np_scrape[0]['lat'] = data_lat
            np_scrape[0]['lng'] = data_lng
            #print data_lat
            #print data_lng
        scraperwiki.sqlite.save(unique_keys=['name'], data=np_scrape[0])
| [
"pallih@kaninka.net"
] | pallih@kaninka.net |
1e2ecbb223cef7769987b8657dce0290f53d0d56 | 2b167e29ba07e9f577c20c54cb943861d0ccfa69 | /numerical_analysis_backup/small-scale-multiobj/pod150_milp/connections/runsimu3_connections.py | 04598461721bb936e4e2ec41fdd27a4e6aa999ee | [] | no_license | LiYan1988/kthOld_OFC | 17aeeed21e195d1a9a3262ec2e67d6b1d3f9ff0f | b1237577ea68ad735a65981bf29584ebd889132b | refs/heads/master | 2021-01-11T17:27:25.574431 | 2017-01-23T05:32:35 | 2017-01-23T05:32:35 | 79,773,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,438 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 4 15:15:10 2016
@author: li
optimize connections
"""
#import sys
#sys.path.insert(0, '/home/li/Dropbox/KTH/numerical_analysis/ILPs')
import csv
from gurobipy import *
import numpy as np
from arch4_decomposition import Arch4_decompose
from arch1 import ModelSDM_arch1
from arch2_decomposition import Arch2_decompose
from arch5_decomposition import Arch5_decompose
np.random.seed(2010)
num_cores=3
num_slots=80
n_sim = 1 # number of simulations
n_start = 3 # index of start
n_end = n_start+n_sim # index of end
time_limit_routing = 1000 # 1000
time_limit_sa = 18000
alpha = 1
beta = 0
result = np.zeros((n_sim, 15))
total_cnk = []
for i in range(n_start, n_end):
filename = 'traffic_matrix__matrix_'+str(i)+'.csv'
# print filename
tm = []
with open(filename) as f:
reader = csv.reader(f)
for idx, row in enumerate(reader):
if idx>11:
row.pop()
row = [int(u) for u in row]
tm.append(row)
tm = np.array(tm)*25
total_cnk.append(tm.flatten().astype(bool).sum())
result[i-n_start, 14] = tm.flatten().astype(bool).sum()
print "\n"
print total_cnk
print "\n"
#%% arch4
print "Architecture 4"
m = Arch4_decompose(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model_routing(mipfocus=1,timelimit=time_limit_routing,mipgap=0.01)
m.create_model_sa(mipfocus=1,timelimit=time_limit_sa)
result[i-n_start, 0] = m.connections_lb
result[i-n_start, 1] = m.connections_ub
result[i-n_start, 2] = m.throughput_lb
result[i-n_start, 3] = m.throughput_ub
#%% arch1
print "Architecutre 1"
m = ModelSDM_arch1(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model(mipfocus=1, timelimit=time_limit_routing,mipgap=0.01)
result[i-n_start, 4] = m.connections
result[i-n_start, 5] = m.throughput
#%% arch2
print "Architecture 2"
m = Arch2_decompose(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model_routing(mipfocus=1,timelimit=time_limit_routing,mipgap=0.01)
m.create_model_sa(mipfocus=1,timelimit=time_limit_sa)
result[i-n_start, 6] = m.connections_lb
result[i-n_start, 7] = m.connections_ub
result[i-n_start, 8] = m.throughput_lb
result[i-n_start, 9] = m.throughput_ub
#%% arch5
print "Architecture 5"
m = Arch5_decompose(tm, num_slots=num_slots, num_cores=num_cores,alpha=alpha,beta=beta)
m.create_model_routing(mipfocus=1, timelimit=time_limit_routing, mipgap=0.01)
m.create_model_sa(mipfocus=1, timelimit=time_limit_sa)
result[i-n_start, 10] = m.connections_lb
result[i-n_start, 11] = m.connections_ub
result[i-n_start, 12] = m.throughput_lb
result[i-n_start, 13] = m.throughput_ub
file_name = "result_connections_{}to{}.csv".format(n_start, n_end)
with open(file_name, 'w') as f:
writer = csv.writer(f, delimiter=',')
writer.writerow(['arch4_connections_lb', 'arch4_connections_ub',
'arch4_throughput_lb', 'arch4_throughput_ub',
'arch1_connections', 'arch1_throughput',
'arch2_connections_lb', 'arch2_connections_ub',
'arch2_throughput_lb', 'arch2_throughput_ub',
'arch5_connections_lb', 'arch5_connections_ub',
'arch5_throughput_lb', 'arch5_throughput_ub',
'total_cnk'])
writer.writerows(result) | [
"li.yan.ly414@gmail.com"
] | li.yan.ly414@gmail.com |
59cccb9b036905a4dcb9b90f777018c6b23081c2 | 0bde5f7f09aa537ed1f4828d4e5ebee66475918f | /h2o-py/tests/testdir_apis/H2O_Module/pyunit_h2oshow_progress.py | 9ff815774d5e57e9adbef2bcf8bb26e912fabca7 | [
"Apache-2.0"
] | permissive | Winfredemalx54/h2o-3 | d69f1c07e1f5d2540cb0ce5e6073415fa0780d32 | dfb163c82ff3bfa6f88cdf02465a9bb4c8189cb7 | refs/heads/master | 2022-12-14T08:59:04.109986 | 2020-09-23T08:36:59 | 2020-09-23T08:36:59 | 297,947,978 | 2 | 0 | Apache-2.0 | 2020-09-23T11:28:54 | 2020-09-23T11:28:54 | null | UTF-8 | Python | false | false | 1,905 | py | from __future__ import print_function
import sys
sys.path.insert(1,"../../../")
from tests import pyunit_utils
import h2o
try:
from StringIO import StringIO # for python 3
except ImportError:
from io import StringIO # for python 2
from h2o.estimators.glm import H2OGeneralizedLinearEstimator
from h2o.utils.typechecks import assert_is_type
import inspect
def h2oshow_progress():
    """
    Python API test: h2o.show_progress()

    Command is verified by eyeballing the pyunit test output file and make sure the progress bars are there.
    Here, we will assume the command runs well if there is no error message.
    """
    try:  # capturing stdout with StringIO only works with Python 3
        s = StringIO()
        sys.stdout = s  # redirect output
        h2o.show_progress()  # true by default.
        training_data = h2o.upload_file(pyunit_utils.locate("smalldata/logreg/benign.csv"))
        Y = 3
        X = [0, 1, 2, 4, 5, 6, 7, 8, 9, 10]
        model = H2OGeneralizedLinearEstimator(family="binomial", alpha=0, Lambda=1e-5)
        model.train(x=X, y=Y, training_frame=training_data)
        sys.stdout = sys.__stdout__  # restore old stdout
        # make sure the word progress is found and % is found. That is how progress is displayed.
        assert ("progress" in s.getvalue()) and ("100%" in s.getvalue()), "h2o.show_progress() command is not working."
    except Exception as e:  # will get error for python 2
        sys.stdout = sys.__stdout__  # restore old stdout
        assert_is_type(e, AttributeError)  # error for using python 2
        assert "encoding" in e.args[0], "h2o.show_progress() command is not working."

    # h2o.show_progress() is documented to take no arguments; verify its signature.
    allargs = inspect.getargspec(h2o.show_progress)
    assert len(allargs.args)==0, "h2o.show_progress() should have no arguments!"
if __name__ == "__main__":
pyunit_utils.standalone_test(h2oshow_progress)
else:
h2oshow_progress()
| [
"noreply@github.com"
] | Winfredemalx54.noreply@github.com |
ec91569702a15fcfcce3a0a53e0befcdb08371a1 | 7e54d5449b511d06158cfc0e2c928b8656e15ac7 | /sortedm2m_tests/models.py | aeb5846d6754d17787b4d7b7a34694466e9107dd | [
"BSD-3-Clause"
] | permissive | jonny5532/django-sortedm2m | 1f326271ef665c4c26f1f5b631bb9f0b70daf853 | bff0707efcc3257e47355cb2e77ab1abe3c48320 | refs/heads/master | 2021-01-15T18:45:55.789077 | 2012-01-19T10:50:15 | 2012-01-19T10:50:15 | 2,563,529 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 726 | py | # -*- coding: utf-8 -*-
from django.db import models
from sortedm2m.fields import SortedManyToManyField
class Shelf(models.Model):
    # Ordered M2M: the relative order of books on a shelf is persisted.
    books = SortedManyToManyField('Book', related_name='shelves')
class Book(models.Model):
    name = models.CharField(max_length=50)

    def __unicode__(self):
        # Python 2 string representation used by the Django admin/tests.
        return self.name
class Store(models.Model):
    # Target model referenced by its full "app_label.Model" path.
    books = SortedManyToManyField('sortedm2m_tests.Book', related_name='stores')
class MessyStore(models.Model):
    # sorted=False makes the field behave like a plain ManyToManyField.
    books = SortedManyToManyField('Book',
        sorted=False,
        related_name='messy_stores')
class SelfReference(models.Model):
    # Self-referential sorted M2M; the '+' suffix suppresses the reverse accessor.
    me = SortedManyToManyField('self', related_name='hide+')

    def __unicode__(self):
        # Python 2 string representation (primary key as text).
        return unicode(self.pk)
"gregor@muellegger.de"
] | gregor@muellegger.de |
e90723be4777e7cd41a8cd6bd27535a5ca0d13b8 | 70cdf0741a22c678401a306229003bf036ffe5a6 | /ocbind/bgp/global_/config/__init__.py | 648d1de9718f18a67c4e2c82ee85c12f2674283d | [] | no_license | zsblevins/nanog81-hackathon | 5001e034339d6b0c6452ae2474f06916bcd715cf | 1b64fd207dd69837f947094fbd6d6c1cea3a1070 | refs/heads/main | 2023-03-03T09:39:28.460000 | 2021-02-15T13:41:38 | 2021-02-15T13:41:38 | 336,698,856 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53,498 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement): expose the builtins
# module under the Python-2 name and alias `long` to `int` on Python 3 so the
# generated code below can use both uniformly.
if six.PY3:
    import builtins as __builtin__
    long = int
elif six.PY2:
    import __builtin__
class config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp - based on the path /bgp/global/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Configuration parameters relating to the global BGP router
  """
  __slots__ = ('_path_helper', '_extmethods', '__as_','__router_id',)

  _yang_name = 'config'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):

    self._path_helper = False

    self._extmethods = False
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)

    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'global', 'config']

  def _get_as_(self):
    """
    Getter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)

    YANG Description: Local autonomous system number of the router. Uses
the 32-bit as-number type from the model in RFC 6991.
    """
    return self.__as_

  def _set_as_(self, v, load=False):
    """
    Setter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_as_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_as_() directly.

    YANG Description: Local autonomous system number of the router. Uses
the 32-bit as-number type from the model in RFC 6991.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """as_ must be of a type compatible with oc-inet:as-number""",
          'defined-type': "oc-inet:as-number",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)""",
        })

    self.__as_ = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_as_(self):
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)


  def _get_router_id(self):
    """
    Getter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)

    YANG Description: Router id of the router - an unsigned 32-bit integer
expressed in dotted quad notation.
    """
    return self.__router_id

  def _set_router_id(self, v, load=False):
    """
    Setter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_router_id() directly.

    YANG Description: Router id of the router - an unsigned 32-bit integer
expressed in dotted quad notation.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """router_id must be of a type compatible with oc-yang:dotted-quad""",
          'defined-type': "oc-yang:dotted-quad",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)""",
        })

    self.__router_id = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_router_id(self):
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)

  # Public attribute access goes through the generated getter/setter pairs.
  as_ = __builtin__.property(_get_as_, _set_as_)
  router_id = __builtin__.property(_get_router_id, _set_router_id)


  _pyangbind_elements = OrderedDict([('as_', as_), ('router_id', router_id), ])
class config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-common - based on the path /bgp/global/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Configuration parameters relating to the global BGP router
  """
  __slots__ = ('_path_helper', '_extmethods', '__as_','__router_id',)

  _yang_name = 'config'
  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):

    self._path_helper = False

    self._extmethods = False
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)

    load = kwargs.pop("load", None)
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'global', 'config']

  def _get_as_(self):
    """
    Getter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)

    YANG Description: Local autonomous system number of the router. Uses
the 32-bit as-number type from the model in RFC 6991.
    """
    return self.__as_

  def _set_as_(self, v, load=False):
    """
    Setter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_as_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_as_() directly.

    YANG Description: Local autonomous system number of the router. Uses
the 32-bit as-number type from the model in RFC 6991.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """as_ must be of a type compatible with oc-inet:as-number""",
          'defined-type': "oc-inet:as-number",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)""",
        })

    self.__as_ = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_as_(self):
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)


  def _get_router_id(self):
    """
    Getter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)

    YANG Description: Router id of the router - an unsigned 32-bit integer
expressed in dotted quad notation.
    """
    return self.__router_id

  def _set_router_id(self, v, load=False):
    """
    Setter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_router_id() directly.

    YANG Description: Router id of the router - an unsigned 32-bit integer
expressed in dotted quad notation.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """router_id must be of a type compatible with oc-yang:dotted-quad""",
          'defined-type': "oc-yang:dotted-quad",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)""",
        })

    self.__router_id = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_router_id(self):
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)

  # Public attribute access goes through the generated getter/setter pairs.
  as_ = __builtin__.property(_get_as_, _set_as_)
  router_id = __builtin__.property(_get_router_id, _set_router_id)


  _pyangbind_elements = OrderedDict([('as_', as_), ('router_id', router_id), ])
class config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-common-multiprotocol - based on the path /bgp/global/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Configuration parameters relating to the global BGP router
  """
  # NOTE(review): pyangbind-generated container class. Do not hand-edit the
  # logic; regenerate from the YANG model instead. Comments are for readers.
  # __slots__ keeps instances to the pyangbind plumbing plus the two leaves;
  # the double-underscore leaf names are subject to class name mangling.
  __slots__ = ('_path_helper', '_extmethods', '__as_','__router_id',)
  _yang_name = 'config'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Leaf defaults: 'as' is constrained to an unsigned 32-bit range;
    # 'router-id' is a string constrained to dotted-quad form by the regex.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    load = kwargs.pop("load", None)
    if args:
      # Copy-construction: the single positional argument must expose every
      # element of this container; only changed leaves are copied via setters.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        # 'load' is threaded through to the setter when supplied by the caller.
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Absolute YANG path of this container; delegates to the parent when this
    # node is attached to a tree, otherwise returns its static path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'global', 'config']
  def _get_as_(self):
    """
    Getter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    return self.__as_
  def _set_as_(self, v, load=False):
    """
    Setter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_as_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_as_() directly.
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """as_ must be of a type compatible with oc-inet:as-number""",
        'defined-type': "oc-inet:as-number",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)""",
      })
    self.__as_ = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_as_(self):
    # Restore the leaf to its generated default (unset) value.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
  def _get_router_id(self):
    """
    Getter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    return self.__router_id
  def _set_router_id(self, v, load=False):
    """
    Setter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_router_id() directly.
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """router_id must be of a type compatible with oc-yang:dotted-quad""",
        'defined-type': "oc-yang:dotted-quad",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)""",
      })
    self.__router_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_router_id(self):
    # Restore the leaf to its generated default (unset) value.
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
  # Public leaf accessors; '__builtin__.property' references the builtin
  # property type via the compatibility alias imported at file top.
  as_ = __builtin__.property(_get_as_, _set_as_)
  router_id = __builtin__.property(_get_router_id, _set_router_id)
  _pyangbind_elements = OrderedDict([('as_', as_), ('router_id', router_id), ])
class config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-common-structure - based on the path /bgp/global/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Configuration parameters relating to the global BGP router
  """
  # NOTE(review): pyangbind-generated container class. Do not hand-edit the
  # logic; regenerate from the YANG model instead. Comments are for readers.
  # __slots__ keeps instances to the pyangbind plumbing plus the two leaves;
  # the double-underscore leaf names are subject to class name mangling.
  __slots__ = ('_path_helper', '_extmethods', '__as_','__router_id',)
  _yang_name = 'config'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Leaf defaults: 'as' is constrained to an unsigned 32-bit range;
    # 'router-id' is a string constrained to dotted-quad form by the regex.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    load = kwargs.pop("load", None)
    if args:
      # Copy-construction: the single positional argument must expose every
      # element of this container; only changed leaves are copied via setters.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        # 'load' is threaded through to the setter when supplied by the caller.
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Absolute YANG path of this container; delegates to the parent when this
    # node is attached to a tree, otherwise returns its static path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'global', 'config']
  def _get_as_(self):
    """
    Getter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    return self.__as_
  def _set_as_(self, v, load=False):
    """
    Setter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_as_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_as_() directly.
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """as_ must be of a type compatible with oc-inet:as-number""",
        'defined-type': "oc-inet:as-number",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)""",
      })
    self.__as_ = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_as_(self):
    # Restore the leaf to its generated default (unset) value.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
  def _get_router_id(self):
    """
    Getter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    return self.__router_id
  def _set_router_id(self, v, load=False):
    """
    Setter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_router_id() directly.
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """router_id must be of a type compatible with oc-yang:dotted-quad""",
        'defined-type': "oc-yang:dotted-quad",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)""",
      })
    self.__router_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_router_id(self):
    # Restore the leaf to its generated default (unset) value.
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
  # Public leaf accessors; '__builtin__.property' references the builtin
  # property type via the compatibility alias imported at file top.
  as_ = __builtin__.property(_get_as_, _set_as_)
  router_id = __builtin__.property(_get_router_id, _set_router_id)
  _pyangbind_elements = OrderedDict([('as_', as_), ('router_id', router_id), ])
class config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-peer-group - based on the path /bgp/global/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Configuration parameters relating to the global BGP router
  """
  # NOTE(review): pyangbind-generated container class. Do not hand-edit the
  # logic; regenerate from the YANG model instead. Comments are for readers.
  # __slots__ keeps instances to the pyangbind plumbing plus the two leaves;
  # the double-underscore leaf names are subject to class name mangling.
  __slots__ = ('_path_helper', '_extmethods', '__as_','__router_id',)
  _yang_name = 'config'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Leaf defaults: 'as' is constrained to an unsigned 32-bit range;
    # 'router-id' is a string constrained to dotted-quad form by the regex.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    load = kwargs.pop("load", None)
    if args:
      # Copy-construction: the single positional argument must expose every
      # element of this container; only changed leaves are copied via setters.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        # 'load' is threaded through to the setter when supplied by the caller.
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Absolute YANG path of this container; delegates to the parent when this
    # node is attached to a tree, otherwise returns its static path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'global', 'config']
  def _get_as_(self):
    """
    Getter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    return self.__as_
  def _set_as_(self, v, load=False):
    """
    Setter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_as_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_as_() directly.
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """as_ must be of a type compatible with oc-inet:as-number""",
        'defined-type': "oc-inet:as-number",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)""",
      })
    self.__as_ = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_as_(self):
    # Restore the leaf to its generated default (unset) value.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
  def _get_router_id(self):
    """
    Getter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    return self.__router_id
  def _set_router_id(self, v, load=False):
    """
    Setter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_router_id() directly.
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """router_id must be of a type compatible with oc-yang:dotted-quad""",
        'defined-type': "oc-yang:dotted-quad",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)""",
      })
    self.__router_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_router_id(self):
    # Restore the leaf to its generated default (unset) value.
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
  # Public leaf accessors; '__builtin__.property' references the builtin
  # property type via the compatibility alias imported at file top.
  as_ = __builtin__.property(_get_as_, _set_as_)
  router_id = __builtin__.property(_get_router_id, _set_router_id)
  _pyangbind_elements = OrderedDict([('as_', as_), ('router_id', router_id), ])
class config(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-bgp-neighbor - based on the path /bgp/global/config. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.
  YANG Description: Configuration parameters relating to the global BGP router
  """
  # NOTE(review): pyangbind-generated container class. Do not hand-edit the
  # logic; regenerate from the YANG model instead. Comments are for readers.
  # __slots__ keeps instances to the pyangbind plumbing plus the two leaves;
  # the double-underscore leaf names are subject to class name mangling.
  __slots__ = ('_path_helper', '_extmethods', '__as_','__router_id',)
  _yang_name = 'config'
  _pybind_generated_by = 'container'
  def __init__(self, *args, **kwargs):
    self._path_helper = False
    self._extmethods = False
    # Leaf defaults: 'as' is constrained to an unsigned 32-bit range;
    # 'router-id' is a string constrained to dotted-quad form by the regex.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    load = kwargs.pop("load", None)
    if args:
      # Copy-construction: the single positional argument must expose every
      # element of this container; only changed leaves are copied via setters.
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        # 'load' is threaded through to the setter when supplied by the caller.
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)
  def _path(self):
    # Absolute YANG path of this container; delegates to the parent when this
    # node is attached to a tree, otherwise returns its static path.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['bgp', 'global', 'config']
  def _get_as_(self):
    """
    Getter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    return self.__as_
  def _set_as_(self, v, load=False):
    """
    Setter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_as_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_as_() directly.
    YANG Description: Local autonomous system number of the router. Uses
    the 32-bit as-number type from the model in RFC 6991.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """as_ must be of a type compatible with oc-inet:as-number""",
        'defined-type': "oc-inet:as-number",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)""",
      })
    self.__as_ = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_as_(self):
    # Restore the leaf to its generated default (unset) value.
    self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
  def _get_router_id(self):
    """
    Getter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    return self.__router_id
  def _set_router_id(self, v, load=False):
    """
    Setter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_router_id() directly.
    YANG Description: Router id of the router - an unsigned 32-bit integer
    expressed in dotted quad notation.
    """
    # Unwrap an already-typed pyangbind value before re-validating it.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    except (TypeError, ValueError):
      # Validation failure is reported as a ValueError carrying a structured
      # description of the expected YANG type.
      raise ValueError({
        'error-string': """router_id must be of a type compatible with oc-yang:dotted-quad""",
        'defined-type': "oc-yang:dotted-quad",
        'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)""",
      })
    self.__router_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_router_id(self):
    # Restore the leaf to its generated default (unset) value.
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
  # Public leaf accessors; '__builtin__.property' references the builtin
  # property type via the compatibility alias imported at file top.
  as_ = __builtin__.property(_get_as_, _set_as_)
  router_id = __builtin__.property(_get_router_id, _set_router_id)
  _pyangbind_elements = OrderedDict([('as_', as_), ('router_id', router_id), ])
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-bgp-global - based on the path /bgp/global/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration parameters relating to the global BGP router
"""
__slots__ = ('_path_helper', '_extmethods', '__as_','__router_id',)
_yang_name = 'config'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['bgp', 'global', 'config']
def _get_as_(self):
"""
Getter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
YANG Description: Local autonomous system number of the router. Uses
the 32-bit as-number type from the model in RFC 6991.
"""
return self.__as_
def _set_as_(self, v, load=False):
"""
Setter method for as_, mapped from YANG variable /bgp/global/config/as (oc-inet:as-number)
If this variable is read-only (config: false) in the
source YANG file, then _set_as_ is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_as_() directly.
YANG Description: Local autonomous system number of the router. Uses
the 32-bit as-number type from the model in RFC 6991.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """as_ must be of a type compatible with oc-inet:as-number""",
'defined-type': "oc-inet:as-number",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)""",
})
self.__as_ = t
if hasattr(self, '_set'):
self._set()
def _unset_as_(self):
self.__as_ = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="as", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-inet:as-number', is_config=True)
def _get_router_id(self):
"""
Getter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
YANG Description: Router id of the router - an unsigned 32-bit integer
expressed in dotted quad notation.
"""
return self.__router_id
  def _set_router_id(self, v, load=False):
    """
    Setter method for router_id, mapped from YANG variable /bgp/global/config/router_id (oc-yang:dotted-quad)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_router_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_router_id() directly.

    YANG Description: Router id of the router - an unsigned 32-bit integer
expressed in dotted quad notation.
    """
    # Typed wrappers may carry a _utype coercion hook; apply it before validation.
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      # Validates v against the dotted-quad regex; raises on incompatible input.
      t = YANGDynClass(v,base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """router_id must be of a type compatible with oc-yang:dotted-quad""",
          'defined-type': "oc-yang:dotted-quad",
          'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)""",
        })
    self.__router_id = t
    if hasattr(self, '_set'):
      self._set()
  def _unset_router_id(self):
    # Reset the "router-id" leaf to a fresh, unset YANGDynClass wrapper with
    # the same dotted-quad pattern restriction used by the setter.
    self.__router_id = YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])$'}), is_leaf=True, yang_name="router-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/bgp', defining_module='openconfig-bgp', yang_type='oc-yang:dotted-quad', is_config=True)
as_ = __builtin__.property(_get_as_, _set_as_)
router_id = __builtin__.property(_get_router_id, _set_router_id)
_pyangbind_elements = OrderedDict([('as_', as_), ('router_id', router_id), ])
| [
"zblevins@netflix.com"
] | zblevins@netflix.com |
b249087047ebb31723edc290464c8960c892c52c | 4138be36f76f33815360ca74a3c80dd1b99bee19 | /tests/m2m_and_gfk_through/models.py | 867abb221851ea093a1ffe67f725cc6ad1db04ec | [
"MIT"
] | permissive | mikewolfd/django-gm2m | e385385b56fb2faa2277e270884f65f60e62ab0c | a8cecc4d6d56c83e8d9c623888f5d07cb6ad8771 | refs/heads/master | 2021-08-22T23:32:17.459805 | 2017-07-19T02:11:21 | 2017-07-19T02:11:21 | 112,767,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,212 | py | """
Test case for issue #5
Django 1.8 migration problems with combined M2M and GM2M relations
"""
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
import gm2m
class GM2MLinks(models.Model):
    """Model exposing a generic many-to-many (django-gm2m) field; regression
    fixture for combined M2M/GM2M migration problems (issue #5)."""
    class Meta:
        app_label = 'm2m_and_gfk_through'
    sources = gm2m.GM2MField()
class MembershipThrough(models.Model):
    """Explicit through model linking Membership to GM2MLinks."""
    class Meta:
        app_label = 'm2m_and_gfk_through'
    # NOTE(review): no on_delete argument — this targets Django < 2.0 where it
    # implicitly defaults to CASCADE; confirm before porting to newer Django.
    possibly = models.ForeignKey('Membership')
    link = models.ForeignKey(GM2MLinks)
class Membership(models.Model):
    """Model with an ordinary M2M to GM2MLinks routed through MembershipThrough."""
    class Meta:
        app_label = 'm2m_and_gfk_through'
    many_link = models.ManyToManyField(GM2MLinks, through=MembershipThrough)
class RandomData(models.Model):
    """
    Even though this seems completely unrelated to any of the other models,
    just adding a GFK causes the problems to surface with an M2M-Through
    """
    class Meta:
        app_label = 'm2m_and_gfk_through'
    # Standard generic-foreign-key triple: content_type + object_id feed my_gfk.
    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType)
    my_gfk = GenericForeignKey('content_type', 'object_id')
| [
"thomas@ksytek.com"
] | thomas@ksytek.com |
397ea1f63ccc85b05f6d84497893b4ac7c16d5bd | 4fc1c45a7e570cc1204d4b5f21150f0771d34ea5 | /tools/benchmark/statistics.py | 46ee420c2e156cd62301b2d0da0e667f9a4ca590 | [] | no_license | CN1Ember/feathernet_mine | 77d29576e4ecb4f85626b94e6ff5884216af3098 | ac0351f59a1ed30abecd1088a46c7af01afa29d5 | refs/heads/main | 2023-05-28T17:19:06.624448 | 2021-06-17T04:39:09 | 2021-06-17T04:39:09 | 374,603,757 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,962 | py | import torch
import torch.nn as nn
from collections import OrderedDict
from .model_hook import ModelHook
from .stat_tree import StatTree, StatNode
from .reporter import report_format
def get_parent_node(root_node, stat_node_name):
    """Descend the stat tree from *root_node* and return the direct parent
    of the node identified by the dotted path *stat_node_name*.

    For a path such as ``a.b.c`` this walks through the ``a`` and ``a.b``
    nodes and returns ``a.b``, i.e. the node under which ``a.b.c`` belongs.
    """
    assert isinstance(root_node, StatNode)

    parts = stat_node_name.split('.')
    current = root_node
    # Stop one level short of the full path so the returned node is the parent.
    for depth in range(1, len(parts)):
        prefix = '.'.join(parts[:depth])
        idx = current.find_child_index(prefix)
        assert idx != -1
        current = current.children[idx]
    return current
def convert_leaf_modules_to_stat_tree(leaf_modules):
    """Build a :class:`StatTree` mirroring the module hierarchy of *leaf_modules*.

    *leaf_modules* maps dotted module names (e.g. ``features.0.conv``) to
    hooked leaf modules; one StatNode is created per path component and the
    measured statistics are copied onto the leaf nodes.

    Fix: removed the dead ``create_index`` counter, which was incremented on
    every node but never read.
    """
    assert isinstance(leaf_modules, OrderedDict)

    root_node = StatNode(name='root', parent=None)
    for leaf_module_name, leaf_module in leaf_modules.items():
        names = leaf_module_name.split('.')
        for i in range(len(names)):
            stat_node_name = '.'.join(names[0:i+1])
            parent_node = get_parent_node(root_node, stat_node_name)
            node = StatNode(name=stat_node_name, parent=parent_node)
            parent_node.add_child(node)
            if i == len(names) - 1:  # leaf module itself: copy measured stats
                input_shape = leaf_module.input_shape.numpy().tolist()
                output_shape = leaf_module.output_shape.numpy().tolist()
                node.input_shape = input_shape
                node.output_shape = output_shape
                node.parameter_quantity = leaf_module.parameter_quantity.numpy()[0]
                node.inference_memory = leaf_module.inference_memory.numpy()[0]
                node.MAdd = leaf_module.MAdd.numpy()[0]
                node.Flops = leaf_module.Flops.numpy()[0]
                node.ConvFlops = leaf_module.ConvFlops.numpy()[0]
                node.duration = leaf_module.duration.numpy()[0]
                node.Memory = leaf_module.Memory.numpy().tolist()
    return StatTree(root_node)
class ModelStat(object):
    """Hook a model, collect per-layer statistics (parameters, memory, MAdd,
    FLOPs, duration) and print a formatted report."""
    def __init__(self, model, input_size, query_granularity=1):
        # input_size is a 4-element spec for the dummy forward pass —
        # presumably (batch, channels, height, width); TODO confirm in ModelHook.
        assert isinstance(model, nn.Module)
        assert isinstance(input_size, (tuple, list)) and len(input_size) == 4
        self._model = model
        self._input_size = input_size
        self._query_granularity = query_granularity

    def _analyze_model(self):
        # Run the hooked pass, fold the per-leaf stats into a tree and
        # flatten it at the requested granularity.
        model_hook = ModelHook(self._model, self._input_size)
        leaf_modules = model_hook.retrieve_leaf_modules()
        stat_tree = convert_leaf_modules_to_stat_tree(leaf_modules)
        collected_nodes = stat_tree.get_collected_stat_nodes(self._query_granularity)
        return collected_nodes

    def show_report(self):
        """Print the per-layer statistics report to stdout."""
        collected_nodes = self._analyze_model()
        report = report_format(collected_nodes)
        print(report)
def stat(model, input_size, query_granularity=1):
    """Convenience entry point: analyze *model* and print its stats report."""
    ModelStat(model, input_size, query_granularity).show_report()
| [
"chenguo@gpu017.scut-smil.cn"
] | chenguo@gpu017.scut-smil.cn |
657299928bba96983fc8b5a5e462eea61359d6db | f4dd8aa4e5476ffde24e27273dd47913c7f9177a | /Dlv2_safe2/tests/parser/26-Hanoi-Tower.asp.test.py | 3ef549c01be148d1405e32e3ec93f938ad439a0b | [
"Apache-2.0"
] | permissive | dave90/Dlv_safe2 | e56071ec1b07c45defda571cb721852e2391abfb | f127f413e3f35d599554e64aaa918bc1629985bc | refs/heads/master | 2020-05-30T10:44:13.473537 | 2015-07-12T12:35:22 | 2015-07-12T12:35:22 | 38,256,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,009 | py | input = """
% The meaning of the time predicate is self-evident. As for the disk
% predicate, there are k disks 1,2,...,k. Disks 1, 2, 3, 4 denote pegs.
% Disks 5, ... are "movable". The larger the number of the disk,
% the "smaller" it is.
%
% The program uses additional predicates:
% on(T,N,M), which is true iff at time T, disk M is on disk N
% move(t,N), which is true iff at time T, it is disk N that will be
% moved
% where(T,N), which is true iff at time T, the disk to be moved is moved
% on top of the disk N.
% goal, which is true iff the goal state is reached at time t
% steps(T), which is the number of time steps T, required to reach the goal (provided part of Input data)
% Read in data
on(0,N1,N) :- on0(N,N1).
onG(K,N1,N) :- ongoal(N,N1), steps(K).
% Specify valid arrangements of disks
% Basic condition. Smaller disks are on larger ones
:- time(T), on(T,N1,N), N1>=N.
% Specify a valid move (only for T<t)
% pick a disk to move
move(T,N) | noMove(T,N) :- disk(N), time(T), steps(K), T<K.
:- move(T,N1), move(T,N2), N1 != N2.
:- time(T), steps(K), T<K, not diskMoved(T).
diskMoved(T) :- move(T,Fv1).
% pick a disk onto which to move
where(T,N) | noWhere(T,N) :- disk(N), time(T), steps(K), T<K.
:- where(T,N1), where(T,N2), N1 != N2.
:- time(T), steps(K), T<K, not diskWhere(T).
diskWhere(T) :- where(T,Fv1).
% pegs cannot be moved
:- move(T,N), N<5.
% only top disk can be moved
:- on(T,N,N1), move(T,N).
% a disk can be placed on top only.
:- on(T,N,N1), where(T,N).
% no disk is moved in two consecutive moves
:- move(T,N), move(TM1,N), TM1=T-1.
% Specify effects of a move
on(TP1,N1,N) :- move(T,N), where(T,N1), TP1=T+1.
on(TP1,N,N1) :- time(T), steps(K), T<K,
on(T,N,N1), not move(T,N1), TP1=T+1.
% Goal description
:- not on(K,N,N1), onG(K,N,N1), steps(K).
:- on(K,N,N1), not onG(K,N,N1),steps(K).
% Solution
put(T,M,N) :- move(T,N), where(T,M), steps(K), T<K.
"""
output = """
% The meaning of the time predicate is self-evident. As for the disk
% predicate, there are k disks 1,2,...,k. Disks 1, 2, 3, 4 denote pegs.
% Disks 5, ... are "movable". The larger the number of the disk,
% the "smaller" it is.
%
% The program uses additional predicates:
% on(T,N,M), which is true iff at time T, disk M is on disk N
% move(t,N), which is true iff at time T, it is disk N that will be
% moved
% where(T,N), which is true iff at time T, the disk to be moved is moved
% on top of the disk N.
% goal, which is true iff the goal state is reached at time t
% steps(T), which is the number of time steps T, required to reach the goal (provided part of Input data)
% Read in data
on(0,N1,N) :- on0(N,N1).
onG(K,N1,N) :- ongoal(N,N1), steps(K).
% Specify valid arrangements of disks
% Basic condition. Smaller disks are on larger ones
:- time(T), on(T,N1,N), N1>=N.
% Specify a valid move (only for T<t)
% pick a disk to move
move(T,N) | noMove(T,N) :- disk(N), time(T), steps(K), T<K.
:- move(T,N1), move(T,N2), N1 != N2.
:- time(T), steps(K), T<K, not diskMoved(T).
diskMoved(T) :- move(T,Fv1).
% pick a disk onto which to move
where(T,N) | noWhere(T,N) :- disk(N), time(T), steps(K), T<K.
:- where(T,N1), where(T,N2), N1 != N2.
:- time(T), steps(K), T<K, not diskWhere(T).
diskWhere(T) :- where(T,Fv1).
% pegs cannot be moved
:- move(T,N), N<5.
% only top disk can be moved
:- on(T,N,N1), move(T,N).
% a disk can be placed on top only.
:- on(T,N,N1), where(T,N).
% no disk is moved in two consecutive moves
:- move(T,N), move(TM1,N), TM1=T-1.
% Specify effects of a move
on(TP1,N1,N) :- move(T,N), where(T,N1), TP1=T+1.
on(TP1,N,N1) :- time(T), steps(K), T<K,
on(T,N,N1), not move(T,N1), TP1=T+1.
% Goal description
:- not on(K,N,N1), onG(K,N,N1), steps(K).
:- on(K,N,N1), not onG(K,N,N1),steps(K).
% Solution
put(T,M,N) :- move(T,N), where(T,M), steps(K), T<K.
"""
| [
"davide@davide-All-Series"
] | davide@davide-All-Series |
286a85a4bf2a2e961e3da5764726f6960e7aef3c | f8ad6963bfc851657ea50c6a036cfad29cdd7f60 | /Books/GodOfPython/P12_File/direct/num3.py | bb276e2864e6fd000091dd1be4546d3379e9146b | [] | no_license | foru120/PythonRepository | e1ab0265c0f50ef2e9acdf7447237c913560692b | db6b6be0f9fb91b0a81a3b6a2ec5631daab10f98 | refs/heads/master | 2021-01-01T06:53:11.728109 | 2019-04-25T13:52:50 | 2019-04-25T13:52:50 | 97,541,222 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,609 | py | f = open('D:/02.Python/ch12/direct/num2.txt', 'r+')
item_list = []
for line in f:
item_list.append(line.strip().split('|'))
while True:
print('-------------------------------------')
print('-- 01. 전체 물품 출력(a) ------------')
print('-- 02. 기존 물품 수량 변경(b) -------')
print('-- 03. 새로운 물품 등록(c) ----------')
print('-- 04. 종료(q) ----------------------')
print('-------------------------------------')
menu = input()
if menu=='q':
break
elif menu=='a':
for item in item_list:
print(item)
elif menu=='b':
print('물품명과 수량을 입력하세요.(물품명 수량)')
temp = input().strip().split(' ')
bol = False
for item in item_list:
if item[0]==temp[0]:
item[2]=temp[1]
bol = True
break
if bol==False:
print('입력하신 물품은 존재하지 않습니다.')
elif menu=='c':
print('새로운 물품을 등록하세요.(물품명 가격 수량)')
temp = input().strip().split(' ')
bol = False
for item in item_list:
if item[0]==temp[0]:
print('이미 존재하는 물품입니다')
bol = True
break
if bol == False:
item_list.append(temp)
else:
print('존재하지 않는 메뉴입니다.')
i=0
for item in item_list:
item_list[i]='|'.join(item_list[i])
i+=1
f.seek(0, 0)
f.write('\n'.join(item_list))
f.close() | [
"broodsky1122@hanmail.net"
] | broodsky1122@hanmail.net |
5625743a10abd9a450aa29c2c7e133553ec1874d | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /MY_REPOS/Lambda-Resource-Static-Assets/2-resources/_External-learning-resources/02-pyth/transitions-master/transitions/extensions/markup.py | 86a08305acdd5b8854ef26cfc5097c6eca8a3ddc | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 9,152 | py | from six import string_types, iteritems
from functools import partial
import itertools
import importlib
from ..core import Machine, Enum
import numbers
class MarkupMachine(Machine):
    """Machine variant that can serialize its configuration (states,
    transitions, callbacks and attached models) into a plain-Python "markup"
    dict, and be reconstructed from such a dict."""
    # Special attributes such as NestedState._name/_parent or Transition._condition are handled differently
    # State/transition attributes that _convert copies verbatim into the markup.
    state_attributes = [
        "on_exit",
        "on_enter",
        "ignore_invalid_triggers",
        "timeout",
        "on_timeout",
        "tags",
        "label",
    ]
    transition_attributes = ["source", "dest", "prepare", "before", "after", "label"]
def __init__(self, *args, **kwargs):
self._markup = kwargs.pop("markup", {})
self._auto_transitions_markup = kwargs.pop("auto_transitions_markup", False)
self._needs_update = True
if self._markup:
models_markup = self._markup.pop("models", [])
super(MarkupMachine, self).__init__(None, **self._markup)
for m in models_markup:
self._add_markup_model(m)
else:
super(MarkupMachine, self).__init__(*args, **kwargs)
self._markup["before_state_change"] = [
x for x in (rep(f) for f in self.before_state_change) if x
]
self._markup["after_state_change"] = [
x for x in (rep(f) for f in self.before_state_change) if x
]
self._markup["prepare_event"] = [
x for x in (rep(f) for f in self.prepare_event) if x
]
self._markup["finalize_event"] = [
x for x in (rep(f) for f in self.finalize_event) if x
]
self._markup["send_event"] = self.send_event
self._markup["auto_transitions"] = self.auto_transitions
self._markup["ignore_invalid_triggers"] = self.ignore_invalid_triggers
self._markup["queued"] = self.has_queue
    @property
    def auto_transitions_markup(self):
        """Whether auto transitions (``to_<state>``) are included in the markup."""
        return self._auto_transitions_markup

    @auto_transitions_markup.setter
    def auto_transitions_markup(self, value):
        """Toggle auto-transition export and flag the cached markup as stale."""
        self._auto_transitions_markup = value
        self._needs_update = True
    @property
    def markup(self):
        """Dict representation of the machine, including attached models."""
        self._markup["models"] = self._convert_models()
        return self.get_markup_config()

    # the only reason why this not part of markup property is that pickle
    # has issues with properties during __setattr__ (self.markup is not set)
    def get_markup_config(self):
        """Return the markup dict, re-serializing states/transitions only if
        something changed since the last export."""
        if self._needs_update:
            self._convert_states_and_transitions(self._markup)
            self._needs_update = False
        return self._markup
    def add_transition(
        self,
        trigger,
        source,
        dest,
        conditions=None,
        unless=None,
        before=None,
        after=None,
        prepare=None,
        **kwargs
    ):
        """Add a transition via the base Machine and mark the markup stale."""
        super(MarkupMachine, self).add_transition(
            trigger,
            source,
            dest,
            conditions=conditions,
            unless=unless,
            before=before,
            after=after,
            prepare=prepare,
            **kwargs
        )
        self._needs_update = True
    def add_states(
        self,
        states,
        on_enter=None,
        on_exit=None,
        ignore_invalid_triggers=None,
        **kwargs
    ):
        """Add states via the base Machine and mark the markup stale."""
        super(MarkupMachine, self).add_states(
            states,
            on_enter=on_enter,
            on_exit=on_exit,
            ignore_invalid_triggers=ignore_invalid_triggers,
            **kwargs
        )
        self._needs_update = True
    @staticmethod
    def format_references(func):
        """Return a readable string for a callback reference.

        Plain callables are represented by their ``__name__``; ``functools.partial``
        objects are rendered as ``name(arg1, kw=val)``; anything else falls back
        to ``str(func)``.
        """
        try:
            return func.__name__
        except AttributeError:
            pass
        if isinstance(func, partial):
            return "%s(%s)" % (
                func.func.__name__,
                ", ".join(
                    itertools.chain(
                        (str(_) for _ in func.args),
                        (
                            "%s=%s" % (key, value)
                            for key, value in iteritems(
                                func.keywords if func.keywords else {}
                            )
                        ),
                    )
                ),
            )
        return str(func)
    def _convert_states_and_transitions(self, root):
        """Serialize the current (possibly nested) scope into *root*."""
        state = getattr(self, "scoped", self)
        if state.initial:
            root["initial"] = state.initial
        if state == self and state.name:
            # self.name carries a trailing separator; strip it for the markup.
            root["name"] = self.name[:-2]
        self._convert_transitions(root)
        self._convert_states(root)
    def _convert_states(self, root):
        """Serialize all states of the current scope into *root*.

        Top-level states go under ``"states"``; nested scopes use ``"children"``.
        """
        key = "states" if getattr(self, "scoped", self) == self else "children"
        root[key] = []
        for state_name, state in self.states.items():
            s_def = _convert(state, self.state_attributes, self.format_references)
            if isinstance(state_name, Enum):
                s_def["name"] = state_name.name
            else:
                s_def["name"] = state_name
            if getattr(state, "states", []):
                # Recurse into nested states within their own scope.
                with self(state_name):
                    self._convert_states_and_transitions(s_def)
            root[key].append(s_def)
    def _convert_transitions(self, root):
        """Serialize all (non-omitted) transitions into root["transitions"]."""
        root["transitions"] = []
        for event in self.events.values():
            if self._omit_auto_transitions(event):
                continue

            for transitions in event.transitions.items():
                for trans in transitions[1]:
                    t_def = _convert(
                        trans, self.transition_attributes, self.format_references
                    )
                    t_def["trigger"] = event.name
                    # Positive conditions (f.target truthy) vs. "unless" guards.
                    con = [
                        x
                        for x in (
                            rep(f.func, self.format_references)
                            for f in trans.conditions
                            if f.target
                        )
                        if x
                    ]
                    unl = [
                        x
                        for x in (
                            rep(f.func, self.format_references)
                            for f in trans.conditions
                            if not f.target
                        )
                        if x
                    ]
                    if con:
                        t_def["conditions"] = con
                    if unl:
                        t_def["unless"] = unl
                    root["transitions"].append(t_def)
    def _add_markup_model(self, markup):
        """Attach one model described by a markup entry.

        ``"self"`` refers to the machine itself; any other class-name is
        imported dynamically and instantiated with no arguments.
        """
        initial = markup.get("state", None)
        if markup["class-name"] == "self":
            self.add_model(self, initial)
        else:
            mod_name, cls_name = markup["class-name"].rsplit(".", 1)
            cls = getattr(importlib.import_module(mod_name), cls_name)
            self.add_model(cls(), initial)
    def _convert_models(self):
        """Serialize the attached models (state, name, class path) for markup."""
        models = []
        for model in self.models:
            state = getattr(model, self.model_attribute)
            model_def = dict(state=state.name if isinstance(state, Enum) else state)
            # Fall back to the object id when the model has no explicit name.
            model_def["name"] = model.name if hasattr(model, "name") else str(id(model))
            model_def["class-name"] = (
                "self"
                if model == self
                else model.__module__ + "." + model.__class__.__name__
            )
            models.append(model_def)
        return models
    def _omit_auto_transitions(self, event):
        # Skip auto-generated to_<state> events unless their export is enabled.
        return self.auto_transitions_markup is False and self._is_auto_transition(event)
    # auto transition events commonly a) start with the 'to_' prefix, followed by b) the state name
    # and c) contain a transition from each state to the target state (including the target)
    def _is_auto_transition(self, event):
        """Heuristically decide whether *event* is an auto-generated transition."""
        if event.name.startswith("to_") and len(event.transitions) == len(self.states):
            state_name = event.name[len("to_") :]
            try:
                # get_state raises ValueError for unknown names.
                _ = self.get_state(state_name)
                return True
            except ValueError:
                pass
        return False
    @classmethod
    def _identify_callback(self, name):
        # NOTE(review): declared @classmethod but the first parameter is named
        # `self`, so `self._needs_update = True` sets an attribute on the
        # *class*, not on an instance — confirm this shadowing is intentional.
        callback_type, target = super(MarkupMachine, self)._identify_callback(name)
        if callback_type:
            self._needs_update = True
        return callback_type, target
def rep(func, format_references=None):
    """Return a string representation for ``func``.

    Strings pass through unchanged, numbers are stringified, and any other
    object is delegated to *format_references* (``None`` when no formatter
    was supplied).
    """
    if isinstance(func, string_types):
        return func
    if isinstance(func, numbers.Number):
        return str(func)
    if format_references is None:
        return None
    return format_references(func)
def _convert(obj, attributes, format_references):
    """Collect the truthy values of *attributes* from *obj* into a dict.

    String values are stored as-is; iterable values are converted element-wise
    via :func:`rep`; non-iterable values are converted directly.
    """
    s = {}
    for key in attributes:
        val = getattr(obj, key, False)
        if not val:
            continue
        if isinstance(val, string_types):
            s[key] = val
        else:
            try:
                s[key] = [rep(v, format_references) for v in iter(val)]
            except TypeError:
                # Not iterable; convert the single value.
                s[key] = rep(val, format_references)
    return s
| [
"bryan.guner@gmail.com"
] | bryan.guner@gmail.com |
0539f68bfceaf9bc8ba55c42d82bfb718fbf5247 | 8578dca588f39923b6ca3af5419cc58d627cefd8 | /牛客企业真题/网易/网易2019实习生招聘编程题集合/牛牛找工作.py | 744ea44f8e482d9fb7720b8c2636fce0023b61e2 | [] | no_license | huhudaya/leetcode- | abc6eca463fc3ce0776218147c4bbed54e92f11f | cff397cb5202277a1ae85135e91051603debde09 | refs/heads/master | 2021-07-26T01:00:02.690250 | 2020-12-25T14:21:14 | 2020-12-25T14:21:14 | 233,403,333 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,604 | py | '''
为了找到自己满意的工作,牛牛收集了每种工作的难度和报酬。牛牛选工作的标准是在难度不超过自身能力值的情况下,牛牛选择报酬最高的工作。在牛牛选定了自己的工作后,牛牛的小伙伴们来找牛牛帮忙选工作,牛牛依然使用自己的标准来帮助小伙伴们。牛牛的小伙伴太多了,于是他只好把这个任务交给了你。
输入描述:
每个输入包含一个测试用例。
每个测试用例的第一行包含两个正整数,分别表示工作的数量N(N<=100000)和小伙伴的数量M(M<=100000)。
接下来的N行每行包含两个正整数,分别表示该项工作的难度Di(Di<=1000000000)和报酬Pi(Pi<=1000000000)。
接下来的一行包含M个正整数,分别表示M个小伙伴的能力值Ai(Ai<=1000000000)。
保证不存在两项工作的报酬相同。
输出描述:
对于每个小伙伴,在单独的一行输出一个正整数表示他能得到的最高报酬。一个工作可以被多个人选择。
输入例子1:
3 3------>N,M
1 100
10 1000
1000000000 1001
9 10 1000000000
输出例子1:
100
1000
1001
'''
# Java
'''
public class MainCorrect {
public static void main(String[] args) {
//划重点!!!此题坑点:输入中间有空行,所以用BuffferedReader会更麻烦,所以选择用Scanner
Scanner sc = new Scanner(System.in);
int n = sc.nextInt();
int m = sc.nextInt();
//保存所有工作的键值对,即<工作能力,报酬>,而且也保存每个小伙伴的能力值键值对,其报酬为0
Map<Integer, Integer> map = new HashMap<Integer, Integer>();
//保存所有工作的能力值以及要计算的每个小伙伴的能力值
int[] ai = new int[m + n];
for(int i = 0; i < n; i++) {
int di = sc.nextInt();
ai[i] = di;
int pi = sc.nextInt();
map.put(di, pi);
}
//保存要计算的每个小伙伴的能力值
int[] bi = new int[m];
for(int i = 0; i < m; i++) {
ai[i + n] = sc.nextInt();
bi[i] = ai[i + n];
if(!map.containsKey(ai[i + n])) {
map.put(ai[i + n], 0);
}
}
//对能力值进行排序
Arrays.sort(ai);
//保存到目前的能力值为止,所能获得的最大报酬,有种dp的味道
int ma = 0;
for(int i = 0; i < m + n; i++) {
//每次都更新当前能力值所对应的最大报酬,由于ma是保存的<=当前能力值所能获得的最大报酬,所以可行
ma = Math.max(ma, map.get(ai[i])); //因为已经排好序了,相当于和前一个最大值进行比较
map.put(ai[i], ma);
}
//遍历每个小伙伴的能力值,从map中获取到其最大报酬(在上面的for循环中已经更新到了)
for(int i = 0; i < m; i++) {
System.out.println(map.get(bi[i]));
}
}
}
'''
# 超时,两层for循环
'''
public class Main {
//用一个类来记录工作能力和报酬的对应关系,其实可以用map实现的
static class Job implements Comparable<Job>{
int di, pi;
public Job(int di, int pi) {
this.di = di;
this.pi = pi;
}
//按工作能力值进行排序
public int compareTo(Job job) {
return this.di - job.di;
}
}
public static void main(String[] args) throws IOException {
Scanner sc = new Scanner(System.in);
int n = sc.nextInt();
int m = sc.nextInt();
Job[] jobs = new Job[n];
for(int i = 0; i < n; i++) {
int di = sc.nextInt();
int pi = sc.nextInt();
jobs[i] = new Job(di, pi);
}
//对工作能力进行排序
Arrays.sort(jobs);
int[] ai = new int[m];
for(int i = 0; i < m; i++) {
ai[i] = sc.nextInt();
}
//逐一计算每个小伙伴,在其工作能力之内所能获得的最大报酬
for(int i = 0; i < m; i++) {
int j = 0;
int cnt = 0;
while(j < n && jobs[j].di <= ai[i]) {
if(cnt < jobs[j].pi) {
cnt = jobs[j].pi;
}
j++;
}
System.out.println(cnt);
}
}
}
'''
# 二分
'''
解题思路:
自定义一个类Work来描述工作
所有的Work存入works数组中,根据工作的难度对works从小到大排序
定义一个dp数组,dp[i]表示难度小于等于works[i]的最大报酬。
对于输入的能力值,使用二分查找,扫描works数组,找到works数组中小于等于指定能力值,且下标最大的Work。
记该Work的下标为index
dp[index]就是结果
// dp[i]:记录难度小于等于works[i].difficulty的最大报酬
dp[0] = works[0].reward;
for (int i = 1; i < works.length; i++) {
dp[i] = dp[i - 1] > works[i].reward ? dp[i - 1] : works[i].reward;
}
'''
# Java
'''
import java.util.Scanner;
import java.util.Arrays;
import java.util.Comparator;
class Work {
int difficulty;
int reward;
public Work(int difficulty, int reward) {
super();
this.difficulty = difficulty;
this.reward = reward;
}
}
public class Main {
public static void main(String[] args) {
findwork();
}
public static void findwork() {
Scanner in = new Scanner(System.in);
int n = in.nextInt();// 工作数量
int m = in.nextInt();// 人数
Work[] works = new Work[n];// 存储n份工作
int[] dp = new int[n];// dp[n]:难度小于等于works[n].difficulty的工作的最高报酬
// 读入n份工作
for (int i = 0; i < n; i++) {
int difficulty = in.nextInt();
int reward = in.nextInt();
Work work = new Work(difficulty, reward);
works[i] = work;
}
// 根据工作的难度,对n份工作从小到大排序
Arrays.sort(works, new Comparator<Work>() {
@Override
public int compare(Work o1, Work o2) {
return o1.difficulty - o2.difficulty;
}
});
// dp[i]:记录难度小于等于works[i].difficulty的最大报酬
dp[0] = works[0].reward;
for (int i = 1; i < works.length; i++) {
dp[i] = dp[i - 1] > works[i].reward ? dp[i - 1] : works[i].reward;
}
for (int i = 0; i < m; i++) {
int capability = in.nextInt();
// 能力值小于所有的工作的难度
if (capability < works[0].difficulty) {
System.out.println(0);
continue;
}
// 能力值大于等于所有的工作的难度
if (capability >= works[n - 1].difficulty) {
System.out.println(dp[n - 1]);
continue;
}
// 二分查找,找到第一个小于capability的work
int low = 0;
int high = n - 1;
while (low <= high) {
int middle = (low + high) / 2;
// works[middle]是符合能力值,且难度最大的工作
if (works[middle].difficulty <= capability && works[middle + 1].difficulty > capability) {
System.out.println(dp[middle]);
break;
}
// 找到难度等于能力值,且下标最大的工作
if (works[middle].difficulty == capability) {
// 找到最后一个符合capability的工作
int index = middle;
while (index + 1 < n && works[index + 1].difficulty == capability) {
index++;
}
System.out.println(dp[middle]);
break;
} else if (capability > works[middle].difficulty) {
low = middle + 1;
} else if (capability < works[middle].difficulty) {
high = middle - 1;
}
}
}
}
}
'''
# 自己的版本
import sys
n, m = list(map(int, input().strip().split()))
di = []
map = dict()
for i in range(n):
line = sys.stdin.readline()
d, p = [int(i) for i in line.strip().split()]
di.append(d)
map[d] = p
cap = [int(i) for i in input().strip().split()]
# cap = [9, 10, 1000000000]
for i in cap:
di.append(i)
if i not in map:
map[i] = 0
di.sort()
# dp
dp = [0 for i in range(m + n)]
dp[0] = map[di[0]]
for i in range(1, m + n):
dp[i] = max(map[di[i]], dp[i - 1])
map[di[i]] = dp[i]
for i in cap:
print(map[i])
import sys
def main():
lines = sys.stdin.readlines()
lines = [l.strip().split() for l in lines if l.strip()]
n, m = int(lines[0][0]), int(lines[0][1])
res = [0] * (n + m)
abilities = list(map(int, lines[-1]))
maps = dict()
for index, l in enumerate(lines[1:-1]):
d, s = int(l[0]), int(l[1])
maps[d] = s
res[index] = d
for index, ability in enumerate(abilities):
res[index + n] = ability
if ability not in maps:
maps[ability] = 0
res.sort()
maxSalary = 0
for index in range(n + m):
maxSalary = max(maxSalary, maps[res[index]])
maps[res[index]] = maxSalary
for index in range(m):
print(maps[abilities[index]])
if __name__ == '__main__':
main()
| [
"457775600@qq.com"
] | 457775600@qq.com |
679c01dbd43a59d383ce9f52f744523310bd916a | 9ac405635f3ac9332e02d0c7803df757417b7fee | /cotizaciones_componentes/migrations/0021_auto_20200215_1600.py | 4b5458c6c75c7a29b644a6d2623a316422029522 | [] | no_license | odecsarrollo/07_intranet_proyectos | 80af5de8da5faeb40807dd7df3a4f55f432ff4c0 | 524aeebb140bda9b1bf7a09b60e54a02f56fec9f | refs/heads/master | 2023-01-08T04:59:57.617626 | 2020-09-25T18:01:09 | 2020-09-25T18:01:09 | 187,250,667 | 0 | 0 | null | 2022-12-30T09:36:37 | 2019-05-17T16:41:35 | JavaScript | UTF-8 | Python | false | false | 438 | py | # Generated by Django 2.2.6 on 2020-02-15 21:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cotizaciones_componentes', '0020_auto_20200215_0953'),
]
operations = [
migrations.AlterField(
model_name='itemcotizacioncomponente',
name='descripcion',
field=models.CharField(max_length=300, null=True),
),
]
| [
"fabio.garcia.sanchez@gmail.com"
] | fabio.garcia.sanchez@gmail.com |
45f139875b7ada90c52391a4a0b587f14a01e96d | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/226/users/4148/codes/1723_2506.py | 4c9fd872f99d5b04e510eba1c69d4e217871649e | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | q = int(input("quant.: "))
pc = int(input("percentual: "))
qvc = int(input("quant. de venda: "))
ano = 0
cap = 12000
while(q>0 and q<cap):
q = q + (q*(pc/100))-qvc
ano = ano +1
if (q<0):
print("EXTINCAO")
if(q>cap):
print("LIMITE")
print(ano)
| [
"jvlo@icomp.ufam.edu.br"
] | jvlo@icomp.ufam.edu.br |
1fd9fbd4ecbd63f40d60da6eeaad2a451a719921 | e2e39726195c7bc075b9bd56e757acd136527d5c | /typings/vtkmodules/vtkCommonExecutionModel/vtkMultiTimeStepAlgorithm.pyi | b8f276a14fcdccf562f6efabb3c16449399f1ca2 | [
"BSD-3-Clause"
] | permissive | gen4438/vtk-python-stubs | a652272183d2d1ee48d4639e86bcffc1ac454af0 | c9abd76362adf387af64ce5ddbd04c5d3bebe9da | refs/heads/main | 2023-04-04T02:13:15.459241 | 2021-04-15T10:47:28 | 2021-04-15T10:53:59 | 358,224,363 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,031 | pyi | """
This type stub file was generated by pyright.
"""
from .vtkAlgorithm import vtkAlgorithm
class vtkMultiTimeStepAlgorithm(vtkAlgorithm):
"""
vtkMultiTimeStepAlgorithm - Superclass for algorithms that would like
to
make multiple time requests
Superclass: vtkAlgorithm
This class can be inherited by any algorithm that wishes to make
multiple time requests upstream. The child class uses
UPDATE_TIME_STEPS to make the time requests and use set of
time-stamped data objects are stored in time order in a
vtkMultiBlockDataSet object.
"""
def GetNumberOfGenerationsFromBase(self, string):
"""
V.GetNumberOfGenerationsFromBase(string) -> int
C++: vtkIdType GetNumberOfGenerationsFromBase(const char *type)
override;
Given a the name of a base class of this class type, return the
distance of inheritance between this class type and the named
class (how many generations of inheritance are there between this
class and the named class). If the named class is not in this
class's inheritance tree, return a negative value. Valid
responses will always be nonnegative. This method works in
combination with vtkTypeMacro found in vtkSetGet.h.
"""
...
    def GetNumberOfGenerationsFromBaseType(self, string):
        """
        V.GetNumberOfGenerationsFromBaseType(string) -> int
        C++: static vtkIdType GetNumberOfGenerationsFromBaseType(
            const char *type)
        Given the name of a base class of this class type, return the
        distance of inheritance between this class type and the named
        class (how many generations of inheritance are there between this
        class and the named class). If the named class is not in this
        class's inheritance tree, return a negative value. Valid
        responses will always be nonnegative. This method works in
        combination with vtkTypeMacro found in vtkSetGet.h.
        """
        ...
    def IsA(self, string):
        """
        V.IsA(string) -> int
        C++: vtkTypeBool IsA(const char *type) override;
        Return 1 if this class is the same type of (or a subclass of) the
        named class. Returns 0 otherwise. This method works in
        combination with vtkTypeMacro found in vtkSetGet.h.
        """
        ...
    def IsTypeOf(self, string):
        """
        V.IsTypeOf(string) -> int
        C++: static vtkTypeBool IsTypeOf(const char *type)
        Return 1 if this class type is the same type of (or a subclass
        of) the named class. Returns 0 otherwise. This method works in
        combination with vtkTypeMacro found in vtkSetGet.h.
        """
        ...
    def NewInstance(self):
        """
        V.NewInstance() -> vtkMultiTimeStepAlgorithm
        C++: vtkMultiTimeStepAlgorithm *NewInstance()
        """
        ...
    def SafeDownCast(self, vtkObjectBase):
        """
        V.SafeDownCast(vtkObjectBase) -> vtkMultiTimeStepAlgorithm
        C++: static vtkMultiTimeStepAlgorithm *SafeDownCast(
            vtkObjectBase *o)
        """
        ...
    def __delattr__(self, *args, **kwargs):
        """ Implement delattr(self, name). """
        ...
    def __getattribute__(self, *args, **kwargs):
        """ Return getattr(self, name). """
        ...
    def __init__(self, *args, **kwargs) -> None:
        ...
    @staticmethod
    def __new__(*args, **kwargs):
        """ Create and return a new object. See help(type) for accurate signature. """
        ...
    def __repr__(self, *args, **kwargs):
        """ Return repr(self). """
        ...
    def __setattr__(self, *args, **kwargs):
        """ Implement setattr(self, name, value). """
        ...
    def __str__(self, *args, **kwargs) -> str:
        """ Return str(self). """
        ...
    # Opaque attribute stubs filled in by the VTK wrapper at runtime;
    # their concrete types are not expressed in this .pyi fragment.
    __this__ = ...
    __dict__ = ...
    __vtkname__ = ...
| [
"g1e2n04@gmail.com"
] | g1e2n04@gmail.com |
762bbef5680c83f8136ec7bbc152abafe40ac2e2 | 59f64b5cf799e31c97b11828dba4787afb8f3f17 | /hail/python/hailtop/aiocloud/aioazure/client/network_client.py | 495771bb1ead6b26ad134d1796f132e647ffc634 | [
"MIT"
] | permissive | hail-is/hail | 2089e6f3b38548f13fa5c2a8ab67f5cfdd67b4f1 | 07a483ae0f46c66f3ed6fd265b48f48c06298f98 | refs/heads/main | 2023-09-01T15:03:01.450365 | 2023-09-01T02:46:35 | 2023-09-01T02:46:35 | 45,069,467 | 913 | 262 | MIT | 2023-09-14T21:53:32 | 2015-10-27T20:55:42 | Python | UTF-8 | Python | false | false | 1,333 | py | from typing import Optional
import aiohttp
from ..session import AzureSession
from .base_client import AzureBaseClient
class AzureNetworkClient(AzureBaseClient):
def __init__(self, subscription_id, resource_group_name, session: Optional[AzureSession] = None, **kwargs):
if 'params' not in kwargs:
kwargs['params'] = {}
params = kwargs['params']
if 'api-version' not in params:
params['api-version'] = '2021-03-01'
session = session or AzureSession(**kwargs)
super().__init__(f'https://management.azure.com/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.Network',
session=session)
async def delete_nic(self, nic_name: str, ignore_not_found: bool = False):
try:
await self.delete(f'/networkInterfaces/{nic_name}')
except aiohttp.ClientResponseError as e:
if ignore_not_found and e.status == 404:
pass
raise
async def delete_public_ip(self, public_ip_name: str, ignore_not_found: bool = False):
try:
await self.delete(f'/publicIPAddresses/{public_ip_name}')
except aiohttp.ClientResponseError as e:
if ignore_not_found and e.status == 404:
pass
raise
| [
"noreply@github.com"
] | hail-is.noreply@github.com |
820951c82f77cecf7f891cf5a2edb0f60f69491b | 3e3bf98840d133e56f0d0eb16ba85678ddd6ca45 | /.history/iss_20200102123332.py | 97d3addabc223e3f815e8f462e6cae4fabebbc2d | [] | no_license | Imraj423/backend-iss-location-assessment | a05d3cc229a5fc4857483ae466348c1f8c23c234 | b0565c089a445ccffcb8d0aab3c0be3bb0c1d5b8 | refs/heads/master | 2020-12-03T17:04:58.512124 | 2020-06-24T16:02:02 | 2020-06-24T16:02:02 | 231,400,854 | 0 | 0 | null | 2020-06-24T16:02:04 | 2020-01-02T14:43:44 | null | UTF-8 | Python | false | false | 672 | py | import requests
import turtle
screen = turtle.Screen()
screen.bgpic("map.gif")
screen.screensize(800, 600)
screen.setup(720, 360)
screen.setworldcoordinates(-180, -90, 180, 90)
image = "iss.gif"
screen.addshape(image)
raf = turtle.Turtle()
raf.shape(image)
raf.setheading(45)
raf.penup()
screen.exitonclick()
def main():
    # Placeholder entry point; not yet wired to location()/astronauts().
    pass
def location():
    """Fetch the ISS's current position from the Open Notify API.

    Prints the raw response body and returns the ``requests.Response``
    object so callers can access ``.json()`` themselves.
    Raises ``requests.HTTPError`` on a non-2xx status.
    """
    s = requests.get('http://api.open-notify.org/iss-now.json')
    # Bug fix: the original accessed `s.json` and `s.raise_for_status` as
    # bare attributes without calling them, so HTTP errors were silently
    # ignored and no parsing happened.
    s.raise_for_status()
    print(s.text)
    return s
def astronauts():
    """Fetch the list of people currently in space from Open Notify.

    Prints the raw response body and returns the ``requests.Response``.
    Raises ``requests.HTTPError`` on a non-2xx status.
    """
    r = requests.get('http://api.open-notify.org/astros.json')
    r.raise_for_status()
    print(r.text)
    # Bug fix: the original ended with `return s`, a name that does not
    # exist in this scope (NameError at runtime). Return the response we
    # actually fetched.
    return r
if __name__ == "__main__":
main()
| [
"dahqniss@gmail.com"
] | dahqniss@gmail.com |
49ef9ae27e9207fdae7537265f3119a3db58a5c0 | ba744a96d4c8fbcbaa15bcdbc5c3efe3860578b7 | /apps/user_operation/migrations/0002_auto_20190613_1536.py | 64a928bf44052bc84aab7e8ccb0cd5c3952c3bca | [] | no_license | zhangliang852469/Mx_shop_afterend | d84107887936baf122ed489de766f5d22958865b | 9d04de806d6ec87778f2ebe002459ee6a854915e | refs/heads/master | 2022-12-17T21:03:21.315285 | 2019-06-18T09:49:51 | 2019-06-18T09:49:51 | 192,023,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,574 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2019-06-13 07:36
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration adding user/goods foreign keys to the
    user_operation models and the (user, goods) uniqueness constraint."""

    # NOTE(review): marked initial alongside 0001 — Django emits multiple
    # `initial = True` migrations when the app's first migration set is
    # split to break circular FK dependencies; presumably intended here,
    # but confirm against 0001_initial.
    initial = True

    dependencies = [
        ('user_operation', '0001_initial'),
        ('goods', '0001_initial'),
        # Resolve AUTH_USER_MODEL even if the project swaps the user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Author of a leaving message.
        migrations.AddField(
            model_name='userleavingmessage',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户'),
        ),
        # Favorited goods item.
        migrations.AddField(
            model_name='userfav',
            name='goods',
            field=models.ForeignKey(help_text='商品id', on_delete=django.db.models.deletion.CASCADE, to='goods.Goods', verbose_name='商品'),
        ),
        # Owner of the favorite.
        migrations.AddField(
            model_name='userfav',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户'),
        ),
        # Owner of the shipping address.
        migrations.AddField(
            model_name='useraddress',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='用户'),
        ),
        # A user may favorite a given goods item at most once.
        migrations.AlterUniqueTogether(
            name='userfav',
            unique_together=set([('user', 'goods')]),
        ),
    ]
| [
"710567585@qq.com"
] | 710567585@qq.com |
a8045b4b3a67464f1ef11694c333a468a7a44896 | f608dbe94b6e05f63d9bfa030c8ca87725957b93 | /core/src/world/actions/inventory/inventory.py | 4568dbfe32d9d472cb85f44fb7376773d9b41eda | [] | no_license | ProjectMHQ/projectm | 3336c82cbd1e330e065cb178d476c72d552fbfaf | adcb42722354ea4929300e9a4597e734b431c6e5 | refs/heads/master | 2023-04-22T18:41:48.091889 | 2021-01-30T11:28:28 | 2021-01-30T11:28:28 | 216,660,020 | 0 | 0 | null | 2021-05-06T20:33:28 | 2019-10-21T20:32:21 | Python | UTF-8 | Python | false | false | 774 | py | from core.src.world.actions.inventory.inventory_messages import InventoryMessages
from core.src.world.components.inventory import InventoryComponent
from core.src.world.components.position import PositionComponent
from core.src.world.domain.entity import Entity
from core.src.world.utils.entity_utils import load_components, search_entities_in_container_by_keyword
from core.src.world.utils.messaging import emit_sys_msg
# Module-level message formatter shared by the inventory action.
messages = InventoryMessages()


async def inventory(entity: Entity):
    """Send the entity a system message listing its inventory contents.

    Loads the entity's position and inventory components, resolves every
    item in the inventory container, and emits them as an 'inventory'
    system message.
    """
    await load_components(entity, PositionComponent, InventoryComponent)
    # Renamed local (was `inventory`) to stop shadowing this coroutine's
    # own name within its body.
    inventory_component = entity.get_component(InventoryComponent)
    # '*' matches every entity held in the container.
    items = await search_entities_in_container_by_keyword(inventory_component, '*')
    await emit_sys_msg(entity, 'inventory', messages.items_to_message(items))
| [
"guido.dassori@gmail.com"
] | guido.dassori@gmail.com |
f4977cc67d8e72649ab03139364065bcecbaaccb | 0ff2c6b1def739e687e7acd809567558bcecd660 | /data_fix/darknet_to_coco.py | 9d159c339a04d09f2adea4326c18abbc528c4ffd | [] | no_license | koalakid1/YOLOPose | 5e7b7cc8df343ad655d070831f0fd6aa1eb45685 | 0da31dfc3bcb216b19746af1e00e3a61b9671517 | refs/heads/master | 2022-04-24T10:04:35.758904 | 2020-04-19T11:45:33 | 2020-04-19T11:45:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,778 | py | import os
import sys
import numpy as np
import pandas as pd
def dark_to_coco(text_dir, output_dir, output_name, img_dir):
sys.stdout = open(os.path.join(output_dir, output_name),"w")
for root, dir, files in os.walk(text_dir):
idx = 0
for f in [f for f in files if os.path.splitext(f)[-1] == ".txt"]:
txt_f = open(os.path.join(text_dir, f), "r")
cor = txt_f.readlines()
data = np.zeros([len(cor),5])
for i in range(len(cor)):
temp = cor[i].split(' ')
for j in range(5):
data[i,j] = float(temp[j])
img_name = f.replace(".txt", ".jpg")
w, h = 416, 416
col_name = ['class', 'xcenter', 'ycenter', 'width', 'height', 'xmin', 'ymin', 'xmax', 'ymax']
df = pd.DataFrame(columns=col_name)
for i in range(5):
df[col_name[i]] = data[:,i]
df['xmin'] = (df['xcenter'] - df['width'] / 2) * w
df['xmin'][df['xmin'] < 0] = 0
df['ymin'] = (df['ycenter'] - df['height'] / 2) * h
df['ymin'][df['ymin'] < 0] = 0
df['xmax'] = (df['xcenter'] + df['width'] / 2) * w
df['ymax'] = (df['ycenter'] + df['height'] / 2) * h
df = df.loc[:,['class', 'xmin', 'ymin', 'xmax', 'ymax']]
df[['class', 'xmin', 'ymin', 'xmax', 'ymax']] = df[['class', 'xmin', 'ymin', 'xmax', 'ymax']].astype(int).astype(str)
df_to_array = df.values.flatten()
data_list = df_to_array.tolist()
data = ' '.join(data_list)
if len(data) != 0:
print("%d %s %d %d %s"%(idx, os.path.join(img_dir, img_name), w, h, data))
idx += 1
sys.stdout.close()
| [
"comojin1994@gmail.com"
] | comojin1994@gmail.com |
6939337d81863f52f0ca5d5db23d4e1d9a6fa3ac | 3b0ff2529391981d234cc0a194503a1651231908 | /python/ccxt/pro/deribit.py | 30cd31b639b24c9d887aa3b096838c10dd31b581 | [
"MIT"
] | permissive | dinamic/ccxt | bba8989a4f165251d97202dfd082fe16a3e22704 | f870edc2eea3cb20e18cf49831b372ce3e7aeb3c | refs/heads/master | 2023-05-10T21:20:37.543926 | 2023-05-04T08:55:59 | 2023-05-04T08:55:59 | 164,325,829 | 2 | 0 | MIT | 2019-04-18T14:33:49 | 2019-01-06T16:39:26 | JavaScript | UTF-8 | Python | false | false | 33,925 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
import ccxt.async_support
from ccxt.async_support.base.ws.cache import ArrayCache, ArrayCacheBySymbolById, ArrayCacheByTimestamp
import hashlib
from ccxt.async_support.base.ws.client import Client
from typing import Optional
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import NotSupported
class deribit(ccxt.async_support.deribit):
    """Deribit websocket (ccxt.pro) exchange implementation."""

    def describe(self):
        # Merge websocket capabilities and options into the REST description.
        return self.deep_extend(super(deribit, self).describe(), {
            'has': {
                'ws': True,
                'watchBalance': True,
                'watchTicker': True,
                'watchTickers': False,
                'watchTrades': True,
                'watchMyTrades': True,
                'watchOrders': True,
                'watchOrderBook': True,
                'watchOHLCV': True,
            },
            'urls': {
                'test': {
                    'ws': 'wss://test.deribit.com/ws/api/v2',
                },
                'api': {
                    'ws': 'wss://www.deribit.com/ws/api/v2',
                },
            },
            'options': {
                # Map unified timeframes to deribit chart resolutions
                # (minutes, or '1D'). Deribit supports 1, 3, 5, 10, 15, 30,
                # 60, 120, 180, 360, 720 and 1D.
                'timeframes': {
                    '1m': 1,
                    '3m': 3,
                    '5m': 5,
                    '15m': 15,
                    '30m': 30,
                    '1h': 60,
                    '2h': 120,
                    # Bug fix: '4h' was previously mapped to 180, which is
                    # deribit's 3-hour resolution, so '4h' requests silently
                    # returned 3h candles. The 180 bucket is now exposed
                    # under its true name '3h'; '4h' (unsupported upstream)
                    # now raises NotSupported in watchOHLCV instead.
                    '3h': 180,
                    '6h': 360,
                    '12h': 720,
                    '1d': '1D',
                },
                'currencies': ['BTC', 'ETH', 'SOL', 'USDC'],
            },
            'streaming': {
            },
            'exceptions': {
            },
        })

    def request_id(self):
        """Return a monotonically increasing JSON-RPC request id,
        persisted in self.options['requestId']."""
        requestId = self.sum(self.safe_integer(self.options, 'requestId', 0), 1)
        self.options['requestId'] = requestId
        return requestId
    async def watch_balance(self, params={}):
        """
        see https://docs.deribit.com/#user-portfolio-currency
        query for balance and get the amount of funds available for trading or funds locked in orders
        :param dict params: extra parameters specific to the deribit api endpoint
        :returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
        """
        await self.authenticate(params)
        messageHash = 'balance'
        url = self.urls['api']['ws']
        # Subscribe to one user.portfolio channel per configured currency
        # (see options['currencies'] in describe()).
        currencies = self.safe_value(self.options, 'currencies', [])
        channels = []
        for i in range(0, len(currencies)):
            currencyCode = currencies[i]
            channels.append('user.portfolio.' + currencyCode)
        subscribe = {
            'jsonrpc': '2.0',
            'method': 'private/subscribe',
            'params': {
                'channels': channels,
            },
            'id': self.request_id(),
        }
        request = self.deep_extend(subscribe, params)
        # All portfolio channels resolve the single 'balance' message hash.
        return await self.watch(url, messageHash, request, messageHash, request)

    def handle_balance(self, client: Client, message):
        #
        # subscription notification, e.g.:
        # {
        #     jsonrpc: '2.0',
        #     method: 'subscription',
        #     params: {
        #         channel: 'user.portfolio.btc',
        #         data: {
        #             margin_balance: 0.0015,
        #             equity: 0.0015,
        #             currency: 'BTC',
        #             balance: 0.0015,
        #             available_withdrawal_funds: 0.0015,
        #             available_funds: 0.0015,
        #             ...per-currency portfolio fields elided...
        #         }
        #     }
        # }
        #
        params = self.safe_value(message, 'params', {})
        data = self.safe_value(params, 'data', {})
        self.balance['info'] = data
        currencyId = self.safe_string(data, 'currency')
        currencyCode = self.safe_currency_code(currencyId)
        balance = self.parse_balance(data)
        # Store the parsed per-currency balance, then resolve all waiters.
        self.balance[currencyCode] = balance
        messageHash = 'balance'
        client.resolve(self.balance, messageHash)
async def watch_ticker(self, symbol: str, params={}):
"""
see https://docs.deribit.com/#ticker-instrument_name-interval
watches a price ticker, a statistical calculation with the information for a specific market.
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the deribit api endpoint
:param str|None params['interval']: specify aggregation and frequency of notifications. Possible values: 100ms, raw
:returns dict: a `ticker structure <https://docs.ccxt.com/#/?id=ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
url = self.urls['api']['ws']
interval = self.safe_string(params, 'interval', '100ms')
params = self.omit(params, 'interval')
await self.load_markets()
if interval == 'raw':
await self.authenticate()
channel = 'ticker.' + market['id'] + '.' + interval
message = {
'jsonrpc': '2.0',
'method': 'public/subscribe',
'params': {
'channels': ['ticker.' + market['id'] + '.' + interval],
},
'id': self.request_id(),
}
request = self.deep_extend(message, params)
return await self.watch(url, channel, request, channel, request)
def handle_ticker(self, client: Client, message):
#
# {
# jsonrpc: '2.0',
# method: 'subscription',
# params: {
# channel: 'ticker.BTC_USDC-PERPETUAL.raw',
# data: {
# timestamp: 1655393725041,
# stats: [Object],
# state: 'open',
# settlement_price: 21729.5891,
# open_interest: 164.501,
# min_price: 20792.9376,
# max_price: 21426.225,
# mark_price: 21109.555,
# last_price: 21132,
# instrument_name: 'BTC_USDC-PERPETUAL',
# index_price: 21122.3937,
# funding_8h: -0.00022427,
# estimated_delivery_price: 21122.3937,
# current_funding: -0.00010782,
# best_bid_price: 21106,
# best_bid_amount: 1.143,
# best_ask_price: 21113,
# best_ask_amount: 0.327
# }
# }
# }
#
params = self.safe_value(message, 'params', {})
data = self.safe_value(params, 'data', {})
marketId = self.safe_string(data, 'instrument_name')
symbol = self.safe_symbol(marketId)
ticker = self.parse_ticker(data)
messageHash = self.safe_string(params, 'channel')
self.tickers[symbol] = ticker
client.resolve(ticker, messageHash)
    async def watch_trades(self, symbol: str, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        get the list of most recent trades for a particular symbol
        see https://docs.deribit.com/#trades-instrument_name-interval
        :param str symbol: unified symbol of the market to fetch trades for
        :param int|None since: timestamp in ms of the earliest trade to fetch
        :param int|None limit: the maximum amount of trades to fetch
        :param dict params: extra parameters specific to the deribit api endpoint
        :param str|None params['interval']: specify aggregation and frequency of notifications. Possible values: 100ms, raw
        :returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
        """
        await self.load_markets()
        market = self.market(symbol)
        url = self.urls['api']['ws']
        interval = self.safe_string(params, 'interval', '100ms')
        params = self.omit(params, 'interval')
        channel = 'trades.' + market['id'] + '.' + interval
        # The 'raw' interval is only available on authenticated connections.
        if interval == 'raw':
            await self.authenticate()
        message = {
            'jsonrpc': '2.0',
            'method': 'public/subscribe',
            'params': {
                'channels': [channel],
            },
            'id': self.request_id(),
        }
        request = self.deep_extend(message, params)
        trades = await self.watch(url, channel, request, channel, request)
        return self.filter_by_since_limit(trades, since, limit, 'timestamp')

    def handle_trades(self, client: Client, message):
        #
        # subscription notification, e.g.:
        # {
        #     "jsonrpc": "2.0",
        #     "method": "subscription",
        #     "params": {
        #         "channel": "trades.BTC_USDC-PERPETUAL.100ms",
        #         "data": [{
        #             "trade_seq": 501899,
        #             "trade_id": "USDC-2436803",
        #             "timestamp": 1655397355998,
        #             "price": 21026,
        #             "instrument_name": "BTC_USDC-PERPETUAL",
        #             "direction": "buy",
        #             "amount": 0.049,
        #             ...other trade fields elided...
        #         }]
        #     }
        # }
        #
        params = self.safe_value(message, 'params', {})
        channel = self.safe_string(params, 'channel', '')
        # Channel format: trades.<instrument>.<interval>
        parts = channel.split('.')
        marketId = self.safe_string(parts, 1)
        symbol = self.safe_symbol(marketId)
        market = self.safe_market(marketId)
        trades = self.safe_value(params, 'data', [])
        stored = self.safe_value(self.trades, symbol)
        if stored is None:
            # Lazily create a bounded per-symbol trade cache.
            limit = self.safe_integer(self.options, 'tradesLimit', 1000)
            stored = ArrayCache(limit)
            self.trades[symbol] = stored
        for i in range(0, len(trades)):
            trade = trades[i]
            parsed = self.parse_trade(trade, market)
            stored.append(parsed)
        self.trades[symbol] = stored
        client.resolve(self.trades[symbol], channel)
async def watch_my_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
get the list of trades associated with the user
see https://docs.deribit.com/#user-trades-instrument_name-interval
:param str symbol: unified symbol of the market to fetch trades for. Use 'any' to watch all trades
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the deribit api endpoint
:param str|None params['interval']: specify aggregation and frequency of notifications. Possible values: 100ms, raw
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
await self.authenticate(params)
if symbol is not None:
await self.load_markets()
symbol = self.symbol(symbol)
url = self.urls['api']['ws']
interval = self.safe_string(params, 'interval', 'raw')
params = self.omit(params, 'interval')
channel = 'user.trades.any.any.' + interval
message = {
'jsonrpc': '2.0',
'method': 'private/subscribe',
'params': {
'channels': [channel],
},
'id': self.request_id(),
}
request = self.deep_extend(message, params)
trades = await self.watch(url, channel, request, channel, request)
return self.filter_by_symbol_since_limit(trades, symbol, since, limit)
def handle_my_trades(self, client: Client, message):
#
# {
# "jsonrpc": "2.0",
# "method": "subscription",
# "params": {
# "channel": "user.trades.any.any.raw",
# "data": [{
# "trade_seq": 149546319,
# "trade_id": "219381310",
# "timestamp": 1655421193564,
# "tick_direction": 0,
# "state": "filled",
# "self_trade": False,
# "reduce_only": False,
# "profit_loss": 0,
# "price": 20236.5,
# "post_only": False,
# "order_type": "market",
# "order_id": "46108941243",
# "matching_id": null,
# "mark_price": 20233.96,
# "liquidity": "T",
# "instrument_name": "BTC-PERPETUAL",
# "index_price": 20253.31,
# "fee_currency": "BTC",
# "fee": 2.5e-7,
# "direction": "buy",
# "amount": 10
# }]
# }
# }
#
params = self.safe_value(message, 'params', {})
channel = self.safe_string(params, 'channel', '')
trades = self.safe_value(params, 'data', [])
cachedTrades = self.myTrades
if cachedTrades is None:
limit = self.safe_integer(self.options, 'tradesLimit', 1000)
cachedTrades = ArrayCacheBySymbolById(limit)
parsed = self.parse_trades(trades)
marketIds = {}
for i in range(0, len(parsed)):
trade = parsed[i]
cachedTrades.append(trade)
symbol = trade['symbol']
marketIds[symbol] = True
client.resolve(cachedTrades, channel)
    async def watch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):
        """
        see https://docs.deribit.com/#public-get_book_summary_by_instrument
        watches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
        :param str symbol: unified symbol of the market to fetch the order book for
        :param int|None limit: the maximum amount of order book entries to return
        :param dict params: extra parameters specific to the deribit api endpoint
        :param str params['interval']: Frequency of notifications. Events will be aggregated over self interval. Possible values: 100ms, raw
        :returns dict: A dictionary of `order book structures <https://docs.ccxt.com/#/?id=order-book-structure>` indexed by market symbols
        """
        await self.load_markets()
        market = self.market(symbol)
        url = self.urls['api']['ws']
        interval = self.safe_string(params, 'interval', '100ms')
        params = self.omit(params, 'interval')
        # The 'raw' interval is only available on authenticated connections.
        if interval == 'raw':
            await self.authenticate()
        channel = 'book.' + market['id'] + '.' + interval
        subscribe = {
            'jsonrpc': '2.0',
            'method': 'public/subscribe',
            'params': {
                'channels': [channel],
            },
            'id': self.request_id(),
        }
        request = self.deep_extend(subscribe, params)
        orderbook = await self.watch(url, channel, request, channel)
        # Return a depth-limited copy of the live book.
        return orderbook.limit()

    def handle_order_book(self, client: Client, message):
        #
        # snapshot, e.g.:
        # {
        #     "jsonrpc": "2.0",
        #     "method": "subscription",
        #     "params": {
        #         "channel": "book.BTC_USDC-PERPETUAL.raw",
        #         "data": {
        #             "type": "snapshot",
        #             "timestamp": 1655395057025,
        #             "instrument_name": "BTC_USDC-PERPETUAL",
        #             "change_id": 1550694837,
        #             "bids": [["new", 20987, 0.487], ["new", 20986, 0.238]],
        #             "asks": [["new", 20999, 0.092], ["new", 21000, 1.238]]
        #         }
        #     }
        # }
        #
        # change, e.g.:
        # {
        #     "jsonrpc": "2.0",
        #     "method": "subscription",
        #     "params": {
        #         "channel": "book.BTC_USDC-PERPETUAL.raw",
        #         "data": {
        #             "type": "change",
        #             "timestamp": 1655395168086,
        #             "prev_change_id": 1550724481,
        #             "instrument_name": "BTC_USDC-PERPETUAL",
        #             "change_id": 1550724483,
        #             "bids": [["new", 20977, 0.109], ["delete", 20975, 0]],
        #             "asks": []
        #         }
        #     }
        # }
        #
        params = self.safe_value(message, 'params', {})
        data = self.safe_value(params, 'data', {})
        channel = self.safe_string(params, 'channel')
        marketId = self.safe_string(data, 'instrument_name')
        symbol = self.safe_symbol(marketId)
        timestamp = self.safe_number(data, 'timestamp')
        storedOrderBook = self.safe_value(self.orderbooks, symbol)
        if storedOrderBook is None:
            storedOrderBook = self.counted_order_book()
        # Both snapshot and change messages use the same [op, price, size]
        # delta format, so they are applied the same way.
        asks = self.safe_value(data, 'asks', [])
        bids = self.safe_value(data, 'bids', [])
        self.handle_deltas(storedOrderBook['asks'], asks)
        self.handle_deltas(storedOrderBook['bids'], bids)
        storedOrderBook['nonce'] = timestamp
        storedOrderBook['timestamp'] = timestamp
        storedOrderBook['datetime'] = self.iso8601(timestamp)
        storedOrderBook['symbol'] = symbol
        self.orderbooks[symbol] = storedOrderBook
        client.resolve(storedOrderBook, channel)

    def clean_order_book(self, data):
        # Strip the leading "new"/"change"/"delete" op from each level,
        # leaving plain [price, size] pairs.
        bids = self.safe_value(data, 'bids', [])
        asks = self.safe_value(data, 'asks', [])
        cleanedBids = []
        for i in range(0, len(bids)):
            cleanedBids.append([bids[i][1], bids[i][2]])
        cleanedAsks = []
        for i in range(0, len(asks)):
            cleanedAsks.append([asks[i][1], asks[i][2]])
        data['bids'] = cleanedBids
        data['asks'] = cleanedAsks
        return data

    def handle_delta(self, bookside, delta):
        # delta is [op, price, size]; the third store() argument is the
        # order count used by the counted order book (1 = present, 0 = gone).
        price = delta[1]
        amount = delta[2]
        if delta[0] == 'new' or delta[0] == 'change':
            bookside.store(price, amount, 1)
        elif delta[0] == 'delete':
            bookside.store(price, amount, 0)

    def handle_deltas(self, bookside, deltas):
        # Apply deltas in message order; ordering is significant.
        for i in range(0, len(deltas)):
            self.handle_delta(bookside, deltas[i])
    async def watch_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        see https://docs.deribit.com/#user-orders-instrument_name-raw
        watches information on multiple orders made by the user
        :param str symbol: unified market symbol of the market orders were made in
        :param int|None since: the earliest time in ms to fetch orders for
        :param int|None limit: the maximum number of order structures to retrieve
        :param dict params: extra parameters specific to the deribit api endpoint
        :returns [dict]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
        """
        await self.load_markets()
        await self.authenticate(params)
        if symbol is not None:
            symbol = self.symbol(symbol)
        url = self.urls['api']['ws']
        # Channel format: user.orders.<kind>.<currency>.<interval>;
        # defaults watch every kind and currency with raw frequency.
        currency = self.safe_string(params, 'currency', 'any')
        interval = self.safe_string(params, 'interval', 'raw')
        kind = self.safe_string(params, 'kind', 'any')
        params = self.omit(params, 'interval', 'currency', 'kind')
        channel = 'user.orders.' + kind + '.' + currency + '.' + interval
        message = {
            'jsonrpc': '2.0',
            'method': 'private/subscribe',
            'params': {
                'channels': [channel],
            },
            'id': self.request_id(),
        }
        request = self.deep_extend(message, params)
        orders = await self.watch(url, channel, request, channel, request)
        if self.newUpdates:
            limit = orders.getLimit(symbol, limit)
        return self.filter_by_symbol_since_limit(orders, symbol, since, limit)

    def handle_orders(self, client: Client, message):
        # Does not return a snapshot of current orders
        #
        # subscription notification, e.g.:
        # {
        #     jsonrpc: '2.0',
        #     method: 'subscription',
        #     params: {
        #         channel: 'user.orders.any.any.raw',
        #         data: {
        #             time_in_force: 'good_til_cancelled',
        #             price: 50000,
        #             order_type: 'limit',
        #             order_state: 'open',
        #             order_id: '46094375191',
        #             instrument_name: 'BTC-PERPETUAL',
        #             direction: 'sell',
        #             amount: 10,
        #             ...other order fields elided...
        #         }
        #     }
        # }
        #
        if self.orders is None:
            # Lazily create a bounded cache of orders keyed by symbol/id.
            limit = self.safe_integer(self.options, 'ordersLimit', 1000)
            self.orders = ArrayCacheBySymbolById(limit)
        params = self.safe_value(message, 'params', {})
        channel = self.safe_string(params, 'channel', '')
        data = self.safe_value(params, 'data', {})
        # `data` may be a single order object or a list of them.
        orders = []
        if isinstance(data, list):
            orders = self.parse_orders(data)
        else:
            order = self.parse_order(data)
            orders = [order]
        for i in range(0, len(orders)):
            self.orders.append(orders[i])
        client.resolve(self.orders, channel)
    async def watch_ohlcv(self, symbol: str, timeframe='1m', since: Optional[int] = None, limit: Optional[int] = None, params={}):
        """
        see https://docs.deribit.com/#chart-trades-instrument_name-resolution
        watches historical candlestick data containing the open, high, low, and close price, and the volume of a market
        :param str symbol: unified symbol of the market to fetch OHLCV data for
        :param str timeframe: the length of time each candle represents
        :param int|None since: timestamp in ms of the earliest candle to fetch
        :param int|None limit: the maximum amount of candles to fetch
        :param dict params: extra parameters specific to the deribit api endpoint
        :returns [[int]]: A list of candles ordered, open, high, low, close, volume
        """
        await self.load_markets()
        market = self.market(symbol)
        url = self.urls['api']['ws']
        # Translate the unified timeframe to a deribit chart resolution
        # using the map declared in describe()'s options.
        timeframes = self.safe_value(self.options, 'timeframes', {})
        interval = self.safe_string(timeframes, timeframe)
        if interval is None:
            raise NotSupported(self.id + ' self interval is not supported, please provide one of the supported timeframes')
        channel = 'chart.trades.' + market['id'] + '.' + interval
        message = {
            'jsonrpc': '2.0',
            'method': 'public/subscribe',
            'params': {
                'channels': [channel],
            },
            'id': self.request_id(),
        }
        request = self.deep_extend(message, params)
        ohlcv = await self.watch(url, channel, request, channel, request)
        if self.newUpdates:
            limit = ohlcv.getLimit(market['symbol'], limit)
        return self.filter_by_since_limit(ohlcv, since, limit, 0)

    def handle_ohlcv(self, client: Client, message):
        #
        # subscription notification, e.g.:
        # {
        #     jsonrpc: '2.0',
        #     method: 'subscription',
        #     params: {
        #         channel: 'chart.trades.BTC_USDC-PERPETUAL.1',
        #         data: {
        #             volume: 0,
        #             tick: 1655403420000,
        #             open: 20951,
        #             low: 20951,
        #             high: 20951,
        #             cost: 0,
        #             close: 20951
        #         }
        #     }
        # }
        #
        params = self.safe_value(message, 'params', {})
        channel = self.safe_string(params, 'channel', '')
        # Channel format: chart.trades.<instrument>.<resolution>
        parts = channel.split('.')
        marketId = self.safe_string(parts, 2)
        symbol = self.safe_symbol(marketId)
        ohlcv = self.safe_value(params, 'data', {})
        # Unified candle order: [timestamp, open, high, low, close, volume]
        parsed = [
            self.safe_number(ohlcv, 'tick'),
            self.safe_number(ohlcv, 'open'),
            self.safe_number(ohlcv, 'high'),
            self.safe_number(ohlcv, 'low'),
            self.safe_number(ohlcv, 'close'),
            self.safe_number(ohlcv, 'volume'),
        ]
        stored = self.safe_value(self.ohlcvs, symbol)
        if stored is None:
            # Lazily create a bounded candle cache keyed by timestamp.
            limit = self.safe_integer(self.options, 'OHLCVLimit', 1000)
            stored = ArrayCacheByTimestamp(limit)
        stored.append(parsed)
        self.ohlcvs[symbol] = stored
        client.resolve(stored, channel)
    def handle_message(self, client: Client, message):
        #
        # Top-level websocket message dispatcher. Messages come in three
        # shapes:
        #
        # error, e.g.:
        # {
        #     "jsonrpc": "2.0",
        #     "id": 1,
        #     "error": {
        #         "message": "Invalid params",
        #         "data": {"reason": "invalid format", "param": "nonce"},
        #         "code": -32602
        #     },
        #     "testnet": False
        # }
        #
        # subscribe confirmation, e.g.:
        # {
        #     jsonrpc: '2.0',
        #     id: 2,
        #     result: ['ticker.BTC_USDC-PERPETUAL.raw'],
        #     testnet: False
        # }
        #
        # subscription notification, e.g.:
        # {
        #     jsonrpc: '2.0',
        #     method: 'subscription',
        #     params: {
        #         channel: 'ticker.BTC_USDC-PERPETUAL.raw',
        #         data: {...channel payload...}
        #     }
        # }
        #
        error = self.safe_value(message, 'error')
        if error is not None:
            raise ExchangeError(self.id + ' ' + self.json(error))
        params = self.safe_value(message, 'params')
        channel = self.safe_string(params, 'channel')
        if channel is not None:
            # Route on the first channel segment; 'user.*' channels route
            # on the second segment through userHandlers.
            parts = channel.split('.')
            channelId = self.safe_string(parts, 0)
            userHandlers = {
                'trades': self.handle_my_trades,
                'portfolio': self.handle_balance,
                'orders': self.handle_orders,
            }
            handlers = {
                'ticker': self.handle_ticker,
                'book': self.handle_order_book,
                'trades': self.handle_trades,
                'chart': self.handle_ohlcv,
                'user': self.safe_value(userHandlers, self.safe_string(parts, 1)),
            }
            handler = self.safe_value(handlers, channelId)
            if handler is not None:
                return handler(client, message)
            raise NotSupported(self.id + ' no handler found for self message ' + self.json(message))
        # No channel: either an auth response (carries an access_token) or
        # a plain request acknowledgement.
        result = self.safe_value(message, 'result', {})
        accessToken = self.safe_string(result, 'access_token')
        if accessToken is not None:
            return self.handle_authentication_message(client, message)
        return message

    def handle_authentication_message(self, client: Client, message):
        #
        # auth response, e.g. (tokens truncated):
        # {
        #     jsonrpc: '2.0',
        #     id: 1,
        #     result: {
        #         token_type: 'bearer',
        #         scope: 'account:read_write ... wallet:read_write',
        #         refresh_token: '1686927372328.1EzFBRmt...',
        #         expires_in: 31536000,
        #         access_token: '1686923272328.1CkwEx-u...'
        #     },
        #     testnet: False
        # }
        #
        # Resolve the 'authenticated' future created in authenticate().
        messageHash = 'authenticated'
        client.resolve(message, messageHash)
        return message
    def authenticate(self, params={}):
        """Authenticate the websocket connection via the exchange's
        ``public/auth`` endpoint.

        Idempotent: the future is cached under
        ``client.subscriptions['authenticated']`` so repeated calls do not
        re-send the auth request.

        :param dict params: extra parameters merged into the auth request
        :returns: a future that resolves once authentication is confirmed
        """
        url = self.urls['api']['ws']
        client = self.client(url)
        time = self.milliseconds()
        timeString = self.number_to_string(time)
        nonce = timeString
        messageHash = 'authenticated'
        future = self.safe_value(client.subscriptions, messageHash)
        if future is None:
            self.check_required_credentials()
            requestId = self.request_id()
            # client_signature grant: HMAC-SHA256 over "<timestamp>\n<nonce>\n<data>"
            # keyed with the API secret (data is empty here).
            signature = self.hmac(self.encode(timeString + '\n' + nonce + '\n'), self.encode(self.secret), hashlib.sha256)
            request = {
                'jsonrpc': '2.0',
                'id': requestId,
                'method': 'public/auth',
                'params': {
                    'grant_type': 'client_signature',
                    'client_id': self.apiKey,
                    'timestamp': time,
                    'signature': signature,
                    'nonce': nonce,
                    'data': '',
                },
            }
            future = self.watch(url, messageHash, self.extend(request, params))
            # Cache so concurrent/later callers share the same auth attempt.
            client.subscriptions[messageHash] = future
        return future
| [
"travis@travis-ci.org"
] | travis@travis-ci.org |
1e593b825cab0a60fb5bff2f8ead37386a8a901b | 031b7927274f55e60d9ab004ce8ea39f34abbbca | /tensorflow_probability/python/bijectors/generalized_pareto.py | 6f4910589e1871a72124a32ef446fd0d61d187fe | [
"Apache-2.0"
] | permissive | brianwa84/probability | 8d87c96d7b8b1a885a7a7377a13978bd13ffa9c3 | 6f8e78d859ac41170be5147c8c7bde54cc5aa83e | refs/heads/master | 2021-06-19T08:58:40.276319 | 2021-05-14T21:43:14 | 2021-05-14T21:44:53 | 146,023,828 | 0 | 0 | Apache-2.0 | 2019-06-06T15:18:43 | 2018-08-24T18:00:25 | Jupyter Notebook | UTF-8 | Python | false | false | 5,582 | py | # Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The GeneralizedPareto bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.bijectors import bijector as bijector_lib
from tensorflow_probability.python.bijectors import chain as chain_bijector
from tensorflow_probability.python.bijectors import shift as shift_bijector
from tensorflow_probability.python.bijectors import sigmoid as sigmoid_bijector
from tensorflow_probability.python.bijectors import softplus as softplus_bijector
from tensorflow_probability.python.internal import auto_composite_tensor
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import parameter_properties
from tensorflow_probability.python.internal import tensor_util
__all__ = [
'GeneralizedPareto',
]
@auto_composite_tensor.auto_composite_tensor(
    omit_kwargs=('name',), module_name='tfp.bijectors')
class GeneralizedPareto(bijector_lib.AutoCompositeTensorBijector):
  """Bijector mapping R**n to non-negative reals.
  Forward computation maps R**n to the support of the `GeneralizedPareto`
  distribution with parameters `loc`, `scale`, and `concentration`.
  #### Mathematical Details
  The forward computation from `y` in R**n to `x` constrains `x` as follows:
  `x >= loc` if `concentration >= 0`
  `x >= loc` and `x <= loc + scale / abs(concentration)` if `concentration < 0`
  This bijector is used as the `_experimental_default_event_space_bijector` of
  the `GeneralizedPareto` distribution.
  """
  def __init__(self,
               loc,
               scale,
               concentration,
               validate_args=False,
               name='generalized_pareto'):
    """Instantiates the bijector.

    Args:
      loc: Floating-point `Tensor`; lower bound of the forward image.
      scale: Floating-point `Tensor`; scale of the target distribution's
        support.
      concentration: Floating-point `Tensor`; selects between the half-open
        (`>= 0`) and bounded (`< 0`) support.
      validate_args: Python `bool` indicating whether arguments should be
        checked for correctness.
      name: Python `str` name given to ops managed by this object.
    """
    parameters = dict(locals())
    with tf.name_scope(name) as name:
      dtype = dtype_util.common_dtype(
          [loc, scale, concentration], dtype_hint=tf.float32)
      # Non-ref conversion keeps deferred parameters (e.g. tf.Variable)
      # readable at call time instead of freezing their values here.
      self._loc = tensor_util.convert_nonref_to_tensor(loc)
      self._scale = tensor_util.convert_nonref_to_tensor(scale)
      self._concentration = tensor_util.convert_nonref_to_tensor(concentration)
      # concentration >= 0 case: support is [loc, inf) -> softplus then shift.
      self._non_negative_concentration_bijector = chain_bijector.Chain([
          shift_bijector.Shift(shift=self._loc, validate_args=validate_args),
          softplus_bijector.Softplus(validate_args=validate_args)
      ], validate_args=validate_args)
      super(GeneralizedPareto, self).__init__(
          validate_args=validate_args,
          forward_min_event_ndims=0,
          dtype=dtype,
          parameters=parameters,
          name=name)
  @classmethod
  def _parameter_properties(cls, dtype):
    # NOTE(review): `concentration` gets a positivity-constraining bijector
    # here even though this class also handles concentration < 0 -- confirm
    # this matches the owning distribution's parameterization.
    return dict(
        loc=parameter_properties.ParameterProperties(),
        scale=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype)))),
        concentration=parameter_properties.ParameterProperties(
            default_constraining_bijector_fn=(
                lambda: softplus_bijector.Softplus(low=dtype_util.eps(dtype)))))
  def _is_increasing(self):
    # Both branches (shifted softplus; sigmoid) are monotonically increasing.
    return True
  @property
  def loc(self):
    return self._loc
  @property
  def scale(self):
    return self._scale
  @property
  def concentration(self):
    return self._concentration
  def _negative_concentration_bijector(self):
    # Constructed dynamically so that `loc + scale / concentration` is
    # tape-safe.
    loc = tf.convert_to_tensor(self.loc)
    high = loc + tf.math.abs(self.scale / self.concentration)
    return sigmoid_bijector.Sigmoid(
        low=loc, high=high, validate_args=self.validate_args)
  def _forward(self, x):
    # Elementwise branch on the sign of concentration: bounded support
    # (sigmoid) when negative, half-open support otherwise.
    return tf.where(self._concentration < 0.,
                    self._negative_concentration_bijector().forward(x),
                    self._non_negative_concentration_bijector.forward(x))
  def _inverse(self, y):
    return tf.where(self._concentration < 0.,
                    self._negative_concentration_bijector().inverse(y),
                    self._non_negative_concentration_bijector.inverse(y))
  def _forward_log_det_jacobian(self, x):
    event_ndims = self.forward_min_event_ndims
    return tf.where(
        self._concentration < 0.,
        self._negative_concentration_bijector().forward_log_det_jacobian(
            x, event_ndims=event_ndims),
        self._non_negative_concentration_bijector.forward_log_det_jacobian(
            x, event_ndims=event_ndims))
  def _inverse_log_det_jacobian(self, y):
    event_ndims = self.inverse_min_event_ndims
    return tf.where(
        self._concentration < 0.,
        self._negative_concentration_bijector().inverse_log_det_jacobian(
            y, event_ndims=event_ndims),
        self._non_negative_concentration_bijector.inverse_log_det_jacobian(
            y, event_ndims=event_ndims))
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
19b6ed2b84c9efa20a3e5b0ba417374644185cee | 8668830f34ce260565217ea3b49e090778780b44 | /sms_gateway/tests/test_task_text_blast_coupon.py | e16bbece6a9871e7570065ff5e98477f81d5e0d4 | [] | no_license | wcirillo/ten | 72baf94da958b2ee6f34940c1fc3116660436762 | a780ccdc3350d4b5c7990c65d1af8d71060c62cc | refs/heads/master | 2016-09-06T13:39:03.966370 | 2015-07-02T12:37:36 | 2015-07-02T12:37:36 | 15,700,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,517 | py | """
Tests of sms_gateway app tasks.
"""
from django.conf import settings
from coupon.models import Action, Coupon, CouponAction, SubscriberAction
from sms_gateway.tasks import text_blast_coupon
from sms_gateway.tests.sms_gateway_test_case import SMSGatewayTestCase
settings.CELERY_ALWAYS_EAGER = True
class TestTextBlast(SMSGatewayTestCase):
    """Unit tests for text blasting of coupons."""

    fixtures = ['test_advertiser', 'test_coupon', 'test_subscriber', ]

    def setUp(self):
        """
        Tests need an eager queue. Load the 'Text Blasted' action shared
        by every test.
        """
        super(TestTextBlast, self).setUp()
        self.action = Action.objects.get(id=11)

    def test_text_blast_coupon(self):
        """
        Asserts that a valid coupon is blasted.
        """
        coupon = Coupon.objects.get(id=1)
        coupon.sms = coupon.get_default_sms()
        coupon.save()
        # No coupon/subscriber actions recorded before the blast.
        # (assertEquals is a deprecated alias removed in Python 3.12.)
        self.assertEqual(CouponAction.objects.filter(
            coupon=coupon,
            action=self.action
        ).count(), 0)
        self.assertEqual(SubscriberAction.objects.filter(
            coupon=coupon,
            action=self.action
        ).count(), 0)
        text_blast_coupon(coupon)
        # Check for subscriber action recorded for this coupon.
        self.assertEqual(str(coupon.subscriber_actions.all()[0].action),
            'Text Blasted')
        try:
            coupon_action = CouponAction.objects.get(
                coupon=coupon,
                action=self.action
            )
            self.assertEqual(coupon_action.count, 1)
        except CouponAction.DoesNotExist:
            self.fail('CouponAction was not created.')
        # Try blasting it again. This is not allowed; count must stay at 1.
        text_blast_coupon(coupon)
        try:
            coupon_action = CouponAction.objects.get(
                coupon=coupon,
                action=self.action
            )
            self.assertEqual(coupon_action.count, 1)
        except CouponAction.DoesNotExist:
            self.fail('CouponAction was not created.')
        # Try blasting a different coupon of same business now.
        coupon = Coupon.objects.get(id=5)
        text_blast_coupon(coupon)
        self.assertEqual(CouponAction.objects.filter(
            coupon=coupon,
            action=self.action
        ).count(), 0)

    def test_blast_not_sms(self):
        """
        Assert a coupon that has is_redeemed_by_sms False does not blast.
        """
        coupon = Coupon.objects.get(id=2)
        text_blast_coupon(coupon)
        self.assertEqual(CouponAction.objects.filter(
            coupon=coupon,
            action=self.action
        ).count(), 0)

    def test_blast_not_approved(self):
        """
        Assert a coupon that is not approved does not blast.
        """
        coupon = Coupon.objects.get(id=3)
        text_blast_coupon(coupon)
        self.assertEqual(CouponAction.objects.filter(
            coupon=coupon,
            action=self.action
        ).count(), 0)

    def test_blast_no_zip(self):
        """
        Assert a coupon that has no zip code does not blast.
        """
        coupon = Coupon.objects.get(id=4)
        text_blast_coupon(coupon)
        self.assertEqual(CouponAction.objects.filter(
            coupon=coupon,
            action=self.action
        ).count(), 0)
"williamcirillo@gmail.com"
] | williamcirillo@gmail.com |
cb9b49a0b772a122167aec5fb2589f2787bb37db | e27f9f1f8bef8b1f4676df84ee3e753974d21a1c | /tests/ignite/handlers/test_lr_finder.py | 61726a9193063b693d63c665108578d4ab4c7dd8 | [
"BSD-3-Clause"
] | permissive | pytorch/ignite | 8fb275638e94e702762eec932b21dc8df7a54cb0 | 34a707e53785cf8a524589f33a570a7516fe064e | refs/heads/master | 2023-09-02T00:27:22.485479 | 2023-08-31T15:10:14 | 2023-08-31T15:10:14 | 111,835,796 | 4,613 | 788 | BSD-3-Clause | 2023-09-13T07:46:41 | 2017-11-23T17:31:21 | Python | UTF-8 | Python | false | false | 25,422 | py | import copy
import os
from pathlib import Path
from unittest.mock import MagicMock
import matplotlib
import pytest
import torch
import torch.nn.functional as F
from torch import nn
from torch.optim import SGD
import ignite.distributed as idist
from ignite.contrib.handlers import FastaiLRFinder
from ignite.engine import create_supervised_trainer, Engine, Events
matplotlib.use("agg")
@pytest.fixture
def no_site_packages():
    """Simulate matplotlib being uninstalled by hiding site-packages.

    Removes the cached matplotlib module and strips every site-packages
    entry from sys.path for the duration of the test, then restores both.
    """
    import sys
    matplotlib = sys.modules["matplotlib"]
    del sys.modules["matplotlib"]
    prev_path = list(sys.path)
    sys.path = [p for p in sys.path if "site-packages" not in p]
    yield "no_site_packages"
    sys.path = prev_path
    sys.modules["matplotlib"] = matplotlib
class DummyModel(nn.Module):
    """Minimal single-linear-layer model used as a stand-in network.

    Optionally flattens its input first so it can also consume
    image-shaped batches (e.g. MNIST).
    """

    def __init__(self, n_channels=10, out_channels=1, flatten_input=False):
        super(DummyModel, self).__init__()
        front = nn.Flatten() if flatten_input else nn.Identity()
        self.net = nn.Sequential(front, nn.Linear(n_channels, out_channels))

    def forward(self, x):
        return self.net(x)
class DummyModelMulipleParamGroups(nn.Module):
    """Three-layer MLP whose layers are optimized as separate param groups."""

    def __init__(self):
        super(DummyModelMulipleParamGroups, self).__init__()
        self.fc1 = nn.Linear(10, 20)
        self.fc2 = nn.Linear(20, 10)
        self.fc3 = nn.Linear(10, 10)

    def forward(self, x):
        hidden = F.relu(self.fc2(F.relu(self.fc1(x))))
        return self.fc3(hidden)
@pytest.fixture
def model():
    """Single-param-group DummyModel (10-in, 10-out linear)."""
    model = DummyModel(out_channels=10)
    yield model
@pytest.fixture
def model_multiple_param_groups():
    """Model whose three layers go into separate optimizer param groups."""
    model_multiple_param_groups = DummyModelMulipleParamGroups()
    yield model_multiple_param_groups
@pytest.fixture
def mnist_model():
    """Flattening linear model sized for MNIST (784 inputs, 10 outputs)."""
    model = DummyModel(n_channels=784, out_channels=10, flatten_input=True)
    yield model
@pytest.fixture
def optimizer(model):
    """Plain momentum-free SGD over the single-group model."""
    yield SGD(model.parameters(), lr=1e-4, momentum=0.0)
@pytest.fixture
def optimizer_multiple_param_groups(model_multiple_param_groups):
    """SGD with one param group per layer, each with its own initial lr."""
    optimizer_multiple_param_groups = SGD(
        [
            {"params": model_multiple_param_groups.fc1.parameters(), "lr": 4e-1},
            {"params": model_multiple_param_groups.fc2.parameters(), "lr": 3e-2},
            {"params": model_multiple_param_groups.fc3.parameters(), "lr": 3e-3},
        ]
    )
    yield optimizer_multiple_param_groups
@pytest.fixture
def mnist_optimizer(mnist_model):
    """SGD over the MNIST-sized model."""
    yield SGD(mnist_model.parameters(), lr=1e-4, momentum=0.0)
@pytest.fixture
def to_save(model, optimizer):
    """Mapping the finder snapshots/restores around its run."""
    yield {"model": model, "optimizer": optimizer}
@pytest.fixture
def mnist_to_save(mnist_model, mnist_optimizer):
    """to_save mapping for the MNIST model/optimizer pair."""
    yield {"model": mnist_model, "optimizer": mnist_optimizer}
@pytest.fixture
def to_save_mulitple_param_groups(model_multiple_param_groups, optimizer_multiple_param_groups):
    """to_save mapping for the multi-param-group model/optimizer pair."""
    yield {"model": model_multiple_param_groups, "optimizer": optimizer_multiple_param_groups}
@pytest.fixture
def lr_finder():
    """Fresh FastaiLRFinder per test."""
    yield FastaiLRFinder()
@pytest.fixture
def dummy_engine(model, optimizer):
    """Supervised MSE trainer over the single-group dummy model."""
    engine = create_supervised_trainer(model, optimizer, nn.MSELoss())
    yield engine
@pytest.fixture
def dummy_engine_mnist(mnist_model, mnist_optimizer):
    """Supervised cross-entropy trainer over the MNIST-sized model."""
    mnist_engine = create_supervised_trainer(mnist_model, mnist_optimizer, nn.CrossEntropyLoss())
    yield mnist_engine
@pytest.fixture
def dummy_engine_mulitple_param_groups(model_multiple_param_groups, optimizer_multiple_param_groups):
    """Supervised MSE trainer over the multi-param-group model."""
    engine_multiple_param_groups = create_supervised_trainer(
        model_multiple_param_groups, optimizer_multiple_param_groups, nn.MSELoss()
    )
    yield engine_multiple_param_groups
@pytest.fixture
def dataloader():
    """100 random batches; each (2, 10) entry unpacks to an (x, y) pair."""
    yield torch.rand(100, 2, 10)
@pytest.fixture
def dataloader_plot():
    """Longer (500-batch) random dataloader used by the plotting tests."""
    yield torch.rand(500, 2, 10)
@pytest.fixture
def mnist_dataloader():
    """Real MNIST training loader (downloaded to /tmp), batch size 256."""
    from torch.utils.data import DataLoader
    from torchvision.datasets import MNIST
    from torchvision.transforms import Compose, Normalize, ToTensor
    data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,))])
    train_loader = DataLoader(
        MNIST(download=True, root="/tmp", transform=data_transform, train=True), batch_size=256, shuffle=True
    )
    yield train_loader
def test_attach_incorrect_input_args(lr_finder, dummy_engine, model, optimizer, dataloader):
    """Every invalid attach()/plot() argument must raise a targeted error."""
    with pytest.raises(TypeError, match=r"Argument to_save should be a mapping"):
        with lr_finder.attach(dummy_engine, to_save=123):
            pass
    with pytest.raises(TypeError, match=r"Object <class 'int'> should have `state_dict` method"):
        with lr_finder.attach(dummy_engine, to_save={1: 2}):
            pass
    with pytest.raises(ValueError, match=r"Mapping to_save should contain 'optimizer' key"):
        with lr_finder.attach(dummy_engine, to_save={"model": model}):
            pass
    to_save = {"model": model, "optimizer": optimizer}
    with pytest.raises(ValueError, match=r"smooth_f is outside the range \[0, 1\]"):
        with lr_finder.attach(dummy_engine, to_save=to_save, smooth_f=234):
            pass
    with pytest.raises(ValueError, match=r"diverge_th should be larger than 1"):
        with lr_finder.attach(dummy_engine, to_save=to_save, diverge_th=0.0):
            pass
    with pytest.raises(TypeError, match=r"if provided, num_iter should be an integer"):
        with lr_finder.attach(dummy_engine, to_save=to_save, num_iter=0.0):
            pass
    with pytest.raises(ValueError, match=r"if provided, num_iter should be positive"):
        with lr_finder.attach(dummy_engine, to_save=to_save, num_iter=0):
            pass
    with pytest.raises(TypeError, match=r"Object to_save\['optimizer'] should be torch optimizer"):
        with lr_finder.attach(dummy_engine, {"model": to_save["model"], "optimizer": to_save["model"]}):
            pass
    with pytest.raises(ValueError, match=r"step_mode should be 'exp' or 'linear'"):
        with lr_finder.attach(dummy_engine, to_save=to_save, step_mode="abc"):
            pass
    # A valid run is needed before plot() argument validation can trigger.
    with lr_finder.attach(dummy_engine, to_save) as trainer_with_finder:
        trainer_with_finder.run(dataloader)
    with pytest.raises(ValueError, match=r"skip_start cannot be negative"):
        lr_finder.plot(skip_start=-1)
    with pytest.raises(ValueError, match=r"skip_end cannot be negative"):
        lr_finder.plot(skip_end=-1)
    with pytest.raises(ValueError, match=r"Number of values of start_lr should be equal to optimizer values."):
        with lr_finder.attach(dummy_engine, to_save, start_lr=[0.1, 0.1]):
            pass
    with pytest.raises(ValueError, match=r"Number of values of end_lr should be equal to optimizer values."):
        with lr_finder.attach(dummy_engine, to_save, end_lr=[0.1, 0.1]):
            pass
    with pytest.raises(TypeError, match=r"start_lr should be a float or list of floats"):
        with lr_finder.attach(dummy_engine, to_save, start_lr=1):
            pass
    with pytest.raises(TypeError, match=r"end_lr should be a float or list of floats"):
        with lr_finder.attach(dummy_engine, to_save, end_lr=1):
            pass
def test_attach_without_with(lr_finder, dummy_engine, to_save):
    """attach() registers handlers only inside the `with` block, and the
    finder refuses to report results before it has actually run."""
    _ = lr_finder.attach(dummy_engine, to_save=to_save)
    for event in dummy_engine._event_handlers:
        assert len(dummy_engine._event_handlers[event]) == 0
    with lr_finder.attach(dummy_engine, to_save=to_save) as _:
        assert any([len(dummy_engine._event_handlers[event]) != 0 for event in dummy_engine._event_handlers])
        with pytest.raises(
            RuntimeError, match=r"learning rate finder didn't run yet so lr_suggestion can't be returned"
        ):
            lr_finder.lr_suggestion()
        with pytest.raises(RuntimeError, match=r"learning rate finder didn't run yet so results can't be plotted"):
            lr_finder.plot()
def test_with_attach(lr_finder, to_save, dummy_engine, dataloader):
    """A full attach/run cycle yields results and detaches all handlers."""
    with lr_finder.attach(dummy_engine, to_save=to_save) as trainer_with_finder:
        trainer_with_finder.run(dataloader)
    assert lr_finder.get_results() is not None
    for event in dummy_engine._event_handlers:
        assert len(dummy_engine._event_handlers[event]) == 0
def test_wrong_values_start_lr_and_end_lr(
    lr_finder, dummy_engine, to_save, dummy_engine_mulitple_param_groups, to_save_mulitple_param_groups
):
    """start_lr >= end_lr must raise, for scalar and per-group values alike."""
    with pytest.raises(ValueError, match=r"start_lr must be less than end_lr"):
        with lr_finder.attach(dummy_engine, to_save=to_save, start_lr=10.0, end_lr=1.0):
            pass
    with pytest.raises(ValueError, match=r"start_lr must be less than end_lr"):
        with lr_finder.attach(
            dummy_engine_mulitple_param_groups,
            to_save=to_save_mulitple_param_groups,
            start_lr=[1.0, 10.0, 5.0],
            end_lr=[10.0, 10.0, 10.0],
        ):
            pass
def test_model_optimizer_reset(lr_finder, to_save, dummy_engine, dataloader):
    """Model, optimizer and trainer state must be restored after the run."""
    optimizer = to_save["optimizer"]
    model = to_save["model"]
    init_optimizer_sd = copy.deepcopy(optimizer.state_dict())
    init_model_sd = copy.deepcopy(model.state_dict())
    init_trainer_sd = copy.deepcopy(dummy_engine.state_dict())
    with pytest.warns(UserWarning, match=r"Run completed without loss diverging"):
        with lr_finder.attach(dummy_engine, to_save=to_save, diverge_th=float("inf")) as trainer_with_finder:
            trainer_with_finder.run(dataloader)
    assert init_optimizer_sd == optimizer.state_dict()
    # Model weights need an elementwise tensor comparison.
    for tensor1, tensor2 in zip(init_model_sd.values(), model.state_dict().values()):
        assert torch.all(torch.eq(tensor1, tensor2))
    assert init_trainer_sd == dummy_engine.state_dict()
def test_lr_policy(lr_finder, to_save, dummy_engine, dataloader):
    """For both step modes the recorded lrs must be strictly increasing."""
    # Original ran the same block twice for "linear" then "exp"; consolidated
    # into a loop (same order) and replaced all([...]) with a generator (C419).
    for step_mode in ("linear", "exp"):
        with lr_finder.attach(dummy_engine, to_save=to_save, step_mode=step_mode) as trainer_with_finder:
            trainer_with_finder.run(dataloader)
        lr = lr_finder.get_results()["lr"]
        assert all(lr[i - 1] < lr[i] for i in range(1, len(lr)))
@pytest.mark.parametrize("step_mode", ["exp", "linear"])
def test_multiple_optimizers(
    lr_finder, dummy_engine_mulitple_param_groups, to_save_mulitple_param_groups, dataloader, step_mode
):
    """Each param group's recorded lr schedule must be strictly increasing."""
    start_lr = [0.1, 0.1, 0.01]
    end_lr = [1.0, 1.0, 1.0]
    with lr_finder.attach(
        dummy_engine_mulitple_param_groups,
        to_save_mulitple_param_groups,
        start_lr=start_lr,
        end_lr=end_lr,
        step_mode=step_mode,
    ) as trainer:
        trainer.run(dataloader)
    groups_lrs = lr_finder.get_results()["lr"]
    # BUGFIX: the previous `assert [all(...) for ...]` asserted a non-empty
    # list, which is always truthy -- the test could never fail. History rows
    # are per-iteration lists of per-group lrs (cf. the reshape(50, 2) layout
    # in test_lr_suggestion_multiple_param_groups), so check monotonicity
    # column-wise, i.e. per group.
    for group_series in zip(*groups_lrs):
        assert all(group_series[i - 1] < group_series[i] for i in range(1, len(group_series)))
def assert_output_sizes(lr_finder, dummy_engine):
    """Check the finder recorded exactly one (lr, loss) pair per iteration."""
    results = lr_finder.get_results()
    expected = dummy_engine.state.iteration
    assert len(results["lr"]) == len(results["loss"]) == expected
def test_num_iter_is_none(lr_finder, to_save, dummy_engine, dataloader):
    """Without num_iter the run covers exactly one pass over the dataloader."""
    with pytest.warns(UserWarning, match=r"Run completed without loss diverging"):
        with lr_finder.attach(dummy_engine, to_save=to_save, diverge_th=float("inf")) as trainer_with_finder:
            trainer_with_finder.run(dataloader)
            assert_output_sizes(lr_finder, dummy_engine)
            assert dummy_engine.state.iteration == len(dataloader)
def test_num_iter_is_enough(lr_finder, to_save, dummy_engine, dataloader):
    """num_iter smaller than the dataset length stops the run at num_iter."""
    with pytest.warns(UserWarning, match=r"Run completed without loss diverging"):
        with lr_finder.attach(
            dummy_engine, to_save=to_save, num_iter=50, diverge_th=float("inf")
        ) as trainer_with_finder:
            trainer_with_finder.run(dataloader)
            assert_output_sizes(lr_finder, dummy_engine)
            # -1 because it terminates when state.iteration > num_iter
            assert dummy_engine.state.iteration - 1 == 50
def test_num_iter_is_not_enough(lr_finder, to_save, dummy_engine, dataloader):
    """num_iter larger than one pass extends the run beyond the dataloader."""
    with lr_finder.attach(dummy_engine, to_save, num_iter=150, diverge_th=float("inf")) as trainer_with_finder:
        with pytest.warns(UserWarning):
            trainer_with_finder.run(dataloader)
    assert_output_sizes(lr_finder, dummy_engine)
    assert dummy_engine.state.iteration != len(dataloader)
    assert dummy_engine.state.iteration == 150
def test_detach_terminates(lr_finder, to_save, dummy_engine, dataloader):
    """After a diverging finder run, the engine is reusable for training."""
    with lr_finder.attach(dummy_engine, to_save, end_lr=100.0, diverge_th=2) as trainer_with_finder:
        trainer_with_finder.run(dataloader)
    dummy_engine.run(dataloader, max_epochs=3)
    assert dummy_engine.state.epoch == 3
def test_different_num_iters(lr_finder, to_save, dummy_engine, dataloader):
    """The finder run performs exactly num_iter iterations, whatever num_iter is."""
    with pytest.warns(UserWarning, match=r"Run completed without loss diverging"):
        with lr_finder.attach(dummy_engine, to_save, num_iter=200, diverge_th=float("inf")) as trainer_with_finder:
            trainer_with_finder.run(dataloader)
            assert trainer_with_finder.state.iteration == 200  # num_iter
    with pytest.warns(UserWarning, match=r"Run completed without loss diverging"):
        with lr_finder.attach(dummy_engine, to_save, num_iter=1000, diverge_th=float("inf")) as trainer_with_finder:
            trainer_with_finder.run(dataloader)
            assert trainer_with_finder.state.iteration == 1000  # num_iter
@pytest.mark.parametrize("step_mode", ["exp", "linear"])
def test_start_lr(lr_finder, to_save, dummy_engine, dataloader, step_mode):
    """The first recorded lr must respect start_lr for both step modes."""
    with lr_finder.attach(
        dummy_engine, to_save, start_lr=0.01, end_lr=10.0, num_iter=5, step_mode=step_mode, diverge_th=1
    ) as trainer_with_finder:
        trainer_with_finder.run(dataloader)
    history = lr_finder.get_results()
    if step_mode == "exp":
        # Exponential schedule has already stepped once by the first record.
        assert 0.01 < history["lr"][0] < 0.16
    else:
        assert pytest.approx(history["lr"][0]) == 0.01
def test_engine_output_type(lr_finder, dummy_engine, optimizer):
    """_log_lr_and_loss accepts only float / 0-d (or 1-element) tensor outputs
    and always records the loss history as plain floats."""
    from ignite.handlers.param_scheduler import PiecewiseLinear
    dummy_engine.state.iteration = 1
    dummy_engine.state.output = [10]
    with pytest.raises(TypeError, match=r"output of the engine should be of type float or 0d torch.Tensor"):
        lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1)
    dummy_engine.state.output = (10, 5)
    with pytest.raises(TypeError, match=r"output of the engine should be of type float or 0d torch.Tensor"):
        lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1)
    dummy_engine.state.output = torch.tensor([1, 2], dtype=torch.float32)
    with pytest.raises(ValueError, match=r"if output of the engine is torch.Tensor"):
        lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1)
    # Valid cases need a real schedule attached so the current lr can be read.
    lr_finder._lr_schedule = PiecewiseLinear(
        optimizer, param_name="lr", milestones_values=[(0, optimizer.param_groups[0]["lr"]), (100, 10)]
    )
    dummy_engine.state.output = torch.tensor(10.0, dtype=torch.float32)
    lr_finder._history = {"lr": [], "loss": []}
    lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1)
    loss = lr_finder._history["loss"][-1]
    assert type(loss) is float
    dummy_engine.state.output = torch.tensor([10.0], dtype=torch.float32)
    lr_finder._history = {"lr": [], "loss": []}
    lr_finder._log_lr_and_loss(dummy_engine, output_transform=lambda x: x, smooth_f=0, diverge_th=1)
    loss = lr_finder._history["loss"][-1]
    assert type(loss) is float
def test_lr_suggestion_unexpected_curve(lr_finder, to_save, dummy_engine, dataloader):
    """A non-U-shaped loss history makes lr_suggestion raise."""
    with lr_finder.attach(dummy_engine, to_save) as trainer_with_finder:
        trainer_with_finder.run(dataloader)
    # Prepending a minimum at the start breaks the expected U shape.
    lr_finder._history["loss"].insert(0, 0)
    with pytest.raises(
        RuntimeError, match=r"FastaiLRFinder got unexpected curve shape, the curve should be somehow U-shaped"
    ):
        lr_finder.lr_suggestion()
def test_lr_suggestion_single_param_group(lr_finder):
    """On a synthetic U-shaped loss curve, lr_suggestion picks the lr at the
    steepest descent of the curve."""
    import numpy as np

    noise = 0.05
    lr_finder._history["loss"] = np.linspace(-5.0, 5.0, num=100) ** 2 + noise
    lr_finder._history["lr"] = np.linspace(0.01, 10, num=100)
    # The history holds numpy arrays (not lists), so lr_suggestion returns a
    # numpy scalar rather than a plain float; compare via .item().
    suggested_lr = lr_finder.lr_suggestion()
    assert pytest.approx(suggested_lr.item()) == 0.110909089
def test_lr_suggestion_multiple_param_groups(lr_finder):
    """With per-group lr history, lr_suggestion returns one value per group."""
    import numpy as np
    noise = 0.06
    lr_finder._history["loss"] = np.linspace(-5.0, 5, num=50) ** 2 + noise
    # 2 param_groups: 50 iterations x 2 groups
    lr_finder._history["lr"] = np.linspace(0.01, 10, num=100).reshape(50, 2)
    # The history holds numpy arrays (not lists), so lr_suggestion returns
    # numpy scalars rather than plain floats; compare via .item().
    suggested_lrs = lr_finder.lr_suggestion()
    assert pytest.approx(suggested_lrs[0].item()) == 0.21181818
    assert pytest.approx(suggested_lrs[1].item()) == 0.31272727
def test_lr_suggestion_mnist(lr_finder, mnist_to_save, dummy_engine_mnist, mnist_dataloader):
    """On real MNIST data (capped at 50 iterations) the suggestion is sane."""
    max_iters = 50
    with lr_finder.attach(dummy_engine_mnist, mnist_to_save, diverge_th=2, step_mode="linear") as trainer_with_finder:
        with trainer_with_finder.add_event_handler(
            Events.ITERATION_COMPLETED(once=max_iters), lambda _: trainer_with_finder.terminate()
        ):
            trainer_with_finder.run(mnist_dataloader)
    assert 1e-4 <= lr_finder.lr_suggestion() <= 2
def test_apply_suggested_lr_unmatched_optimizers(
    lr_finder, mnist_to_save, dummy_engine_mnist, optimizer_multiple_param_groups, mnist_dataloader
):
    """apply_suggested_lr must reject an optimizer whose param-group count
    differs from the one the finder ran with."""
    with lr_finder.attach(dummy_engine_mnist, mnist_to_save) as trainer_with_finder:
        trainer_with_finder.run(mnist_dataloader)
    # Removed an unused `sug_lr = lr_finder.lr_suggestion()` assignment (F841);
    # apply_suggested_lr computes the suggestion itself.
    with pytest.raises(RuntimeError, match=r"The number of parameter groups does not match"):
        lr_finder.apply_suggested_lr(optimizer_multiple_param_groups)
def test_apply_suggested_lr_single_param_groups(
    lr_finder, mnist_to_save, dummy_engine_mnist, mnist_optimizer, mnist_dataloader
):
    """apply_suggested_lr writes the suggested lr into the single param group."""
    with lr_finder.attach(dummy_engine_mnist, mnist_to_save) as trainer_with_finder:
        trainer_with_finder.run(mnist_dataloader)
    sug_lr = lr_finder.lr_suggestion()
    lr_finder.apply_suggested_lr(mnist_optimizer)
    assert mnist_optimizer.param_groups[0]["lr"] == sug_lr
def test_apply_suggested_lr_multiple_param_groups(
    lr_finder,
    to_save_mulitple_param_groups,
    dummy_engine_mulitple_param_groups,
    optimizer_multiple_param_groups,
    dataloader_plot,
):
    """apply_suggested_lr writes one suggested lr into each param group."""
    with lr_finder.attach(dummy_engine_mulitple_param_groups, to_save_mulitple_param_groups) as trainer_with_finder:
        trainer_with_finder.run(dataloader_plot)
    sug_lr = lr_finder.lr_suggestion()
    lr_finder.apply_suggested_lr(optimizer_multiple_param_groups)
    for i in range(len(sug_lr)):
        assert optimizer_multiple_param_groups.param_groups[i]["lr"] == sug_lr[i]
def test_no_matplotlib(no_site_packages, lr_finder):
    """plot() must fail with a clear error when matplotlib is unavailable."""
    with pytest.raises(ModuleNotFoundError, match=r"This method requires matplotlib to be installed"):
        lr_finder.plot()
def test_plot_single_param_group(dirname, lr_finder, mnist_to_save, dummy_engine_mnist, mnist_dataloader):
    """plot() returns a log-x axes (created or caller-supplied) that can be saved."""
    with lr_finder.attach(dummy_engine_mnist, mnist_to_save, end_lr=20.0, smooth_f=0.04) as trainer_with_finder:
        trainer_with_finder.run(mnist_dataloader)
    def _test(ax):
        # Sanity-check the axes and make sure the figure is writable to disk.
        assert ax is not None
        assert ax.get_xscale() == "log"
        assert ax.get_xlabel() == "Learning rate"
        assert ax.get_ylabel() == "Loss"
        filepath = Path(dirname) / "dummy.jpg"
        ax.figure.savefig(filepath)
        assert filepath.exists()
        filepath.unlink()
    lr_finder.plot()
    ax = lr_finder.plot(skip_end=0)
    _test(ax)
    # Passing axes object
    from matplotlib import pyplot as plt
    _, ax = plt.subplots()
    lr_finder.plot(skip_end=0, ax=ax)
    _test(ax)
def test_plot_multiple_param_groups(
    dirname, lr_finder, to_save_mulitple_param_groups, dummy_engine_mulitple_param_groups, dataloader_plot
):
    """Same as the single-group plot test, over multiple param groups."""
    with lr_finder.attach(
        dummy_engine_mulitple_param_groups, to_save_mulitple_param_groups, end_lr=20.0, smooth_f=0.04
    ) as trainer_with_finder:
        trainer_with_finder.run(dataloader_plot)
    def _test(ax):
        # Sanity-check the axes and make sure the figure is writable to disk.
        assert ax is not None
        assert ax.get_xscale() == "log"
        assert ax.get_xlabel() == "Learning rate"
        assert ax.get_ylabel() == "Loss"
        filepath = Path(dirname) / "dummy_muliple_param_groups.jpg"
        ax.figure.savefig(filepath)
        assert filepath.exists()
        filepath.unlink()
    ax = lr_finder.plot(skip_start=0, skip_end=0)
    _test(ax)
    # Passing axes object
    from matplotlib import pyplot as plt
    _, ax = plt.subplots()
    lr_finder.plot(skip_start=0, skip_end=0, ax=ax)
    _test(ax)
def _test_distrib_log_lr_and_loss(device):
    """Check the recorded loss is the all-reduce of each rank's local loss."""
    from ignite.handlers import ParamScheduler
    lr_finder = FastaiLRFinder()
    _lr_schedule = MagicMock(spec=ParamScheduler)
    # minimal setup for lr_finder to make _log_lr_and_loss work
    rank = idist.get_rank()
    # Distinct per-rank loss so the reduction is observable.
    loss = 0.01 * (rank + 1)
    engine = Engine(lambda e, b: None)
    engine.state.output = loss
    engine.state.iteration = 1
    lr_finder._lr_schedule = _lr_schedule
    lr_finder._history["loss"] = []
    lr_finder._history["lr"] = []
    lr_finder._log_lr_and_loss(engine, output_transform=lambda x: x, smooth_f=0.1, diverge_th=10.0)
    expected_loss = idist.all_reduce(loss)
    assert pytest.approx(lr_finder._history["loss"][-1]) == expected_loss
def _test_distrib_integration_mnist(dirname, device):
    """End-to-end finder run on MNIST under a distributed configuration:
    run, plot (rank 0 saves the figure), then apply the suggested lr."""
    from torch.utils.data import DataLoader
    from torchvision.datasets import MNIST
    from torchvision.transforms import Compose, Normalize, ToTensor
    data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,))])
    train_loader = DataLoader(
        MNIST(download=True, root="/tmp", transform=data_transform, train=True), batch_size=256, shuffle=True
    )
    class DummyModel(nn.Module):
        # Local copy so the distributed worker does not depend on module state.
        def __init__(self, n_channels=10, out_channels=1, flatten_input=False):
            super(DummyModel, self).__init__()
            self.net = nn.Sequential(
                nn.Flatten() if flatten_input else nn.Identity(), nn.Linear(n_channels, out_channels)
            )
        def forward(self, x):
            return self.net(x)
    model = DummyModel(n_channels=784, out_channels=10, flatten_input=True)
    model = model.to(device)
    optimizer = SGD(model.parameters(), lr=1e-4, momentum=0.0)
    to_save = {"model": model, "optimizer": optimizer}
    engine = create_supervised_trainer(model, optimizer, nn.CrossEntropyLoss(), device=device)
    lr_finder = FastaiLRFinder()
    with lr_finder.attach(engine, to_save) as trainer_with_finder:
        trainer_with_finder.run(train_loader)
    lr_finder.plot()
    if idist.get_rank() == 0:
        # Only rank 0 writes the figure to avoid concurrent file writes.
        ax = lr_finder.plot(skip_end=0)
        filepath = Path(dirname) / "distrib_dummy.jpg"
        ax.figure.savefig(filepath)
        assert filepath.exists()
    sug_lr = lr_finder.lr_suggestion()
    assert 1e-3 <= sug_lr <= 1
    lr_finder.apply_suggested_lr(optimizer)
    assert optimizer.param_groups[0]["lr"] == sug_lr
@pytest.mark.distributed
@pytest.mark.skipif(not idist.has_native_dist_support, reason="Skip if no native dist support")
def test_distrib_gloo_cpu_or_gpu(dirname, distributed_context_single_node_gloo):
    """Run the distributed checks under a single-node gloo backend."""
    device = idist.device()
    _test_distrib_log_lr_and_loss(device)
    _test_distrib_integration_mnist(dirname, device)
@pytest.mark.distributed
@pytest.mark.skipif(not idist.has_native_dist_support, reason="Skip if no native dist support")
@pytest.mark.skipif(torch.cuda.device_count() < 1, reason="Skip if no GPU")
def test_distrib_nccl_gpu(dirname, distributed_context_single_node_nccl):
    """Run the distributed checks under a single-node nccl (GPU) backend."""
    device = idist.device()
    _test_distrib_log_lr_and_loss(device)
    _test_distrib_integration_mnist(dirname, device)
@pytest.mark.tpu
@pytest.mark.skipif("NUM_TPU_WORKERS" in os.environ, reason="Skip if NUM_TPU_WORKERS is in env vars")
@pytest.mark.skipif(not idist.has_xla_support, reason="Not on TPU device")
def test_distrib_single_device_xla(dirname):
    """Run the distributed checks on a single XLA (TPU) device."""
    device = idist.device()
    assert "xla" in device.type
    _test_distrib_log_lr_and_loss(device)
    _test_distrib_integration_mnist(dirname, device)
def _test_distrib_log_lr_and_loss_xla_nprocs(index, dirname):
    """Per-process body executed by the XLA multiprocessing spawner.

    ``index`` is the worker rank supplied by the spawner; it is unused here
    because ``idist.device()`` already resolves the per-process device.
    """
    device = idist.device()
    _test_distrib_log_lr_and_loss(device)
    _test_distrib_integration_mnist(dirname, device)
    import time
    # hack to have all proc properly sync:
    time.sleep(1)
@pytest.mark.tpu
@pytest.mark.skipif("NUM_TPU_WORKERS" not in os.environ, reason="Skip if no NUM_TPU_WORKERS is in env vars")
@pytest.mark.skipif(not idist.has_xla_support, reason="Not on TPU device")
def test_distrib_xla_nprocs(dirname, xmp_executor):
    """Fan the XLA per-process body out over NUM_TPU_WORKERS processes."""
    n = int(os.environ["NUM_TPU_WORKERS"])
    xmp_executor(_test_distrib_log_lr_and_loss_xla_nprocs, args=(dirname,), nprocs=n)
| [
"noreply@github.com"
] | pytorch.noreply@github.com |
78cdac41765e901ac43ff0a7a706f6cdf8a14db2 | 42c48f3178a48b4a2a0aded547770027bf976350 | /google/ads/google_ads/v5/proto/services/keyword_plan_ad_group_service_pb2.py | 901820cb46b24fd64b06a8624f8446136d512d1b | [
"Apache-2.0"
] | permissive | fiboknacky/google-ads-python | e989464a85f28baca1f28d133994c73759e8b4d6 | a5b6cede64f4d9912ae6ad26927a54e40448c9fe | refs/heads/master | 2021-08-07T20:18:48.618563 | 2020-12-11T09:21:29 | 2020-12-11T09:21:29 | 229,712,514 | 0 | 0 | Apache-2.0 | 2019-12-23T08:44:49 | 2019-12-23T08:44:49 | null | UTF-8 | Python | false | true | 22,048 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v5/proto/services/keyword_plan_ad_group_service.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v5.proto.resources import keyword_plan_ad_group_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_keyword__plan__ad__group__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.api import client_pb2 as google_dot_api_dot_client__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v5/proto/services/keyword_plan_ad_group_service.proto',
package='google.ads.googleads.v5.services',
syntax='proto3',
serialized_options=b'\n$com.google.ads.googleads.v5.servicesB\036KeywordPlanAdGroupServiceProtoP\001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v5/services;services\242\002\003GAA\252\002 Google.Ads.GoogleAds.V5.Services\312\002 Google\\Ads\\GoogleAds\\V5\\Services\352\002$Google::Ads::GoogleAds::V5::Services',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nJgoogle/ads/googleads_v5/proto/services/keyword_plan_ad_group_service.proto\x12 google.ads.googleads.v5.services\x1a\x43google/ads/googleads_v5/proto/resources/keyword_plan_ad_group.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/rpc/status.proto\"j\n\x1cGetKeywordPlanAdGroupRequest\x12J\n\rresource_name\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+googleads.googleapis.com/KeywordPlanAdGroup\"\xc4\x01\n MutateKeywordPlanAdGroupsRequest\x12\x18\n\x0b\x63ustomer_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12V\n\noperations\x18\x02 \x03(\x0b\x32=.google.ads.googleads.v5.services.KeywordPlanAdGroupOperationB\x03\xe0\x41\x02\x12\x17\n\x0fpartial_failure\x18\x03 \x01(\x08\x12\x15\n\rvalidate_only\x18\x04 \x01(\x08\"\xff\x01\n\x1bKeywordPlanAdGroupOperation\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12G\n\x06\x63reate\x18\x01 \x01(\x0b\x32\x35.google.ads.googleads.v5.resources.KeywordPlanAdGroupH\x00\x12G\n\x06update\x18\x02 \x01(\x0b\x32\x35.google.ads.googleads.v5.resources.KeywordPlanAdGroupH\x00\x12\x10\n\x06remove\x18\x03 \x01(\tH\x00\x42\x0b\n\toperation\"\xa9\x01\n!MutateKeywordPlanAdGroupsResponse\x12\x31\n\x15partial_failure_error\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12Q\n\x07results\x18\x02 \x03(\x0b\x32@.google.ads.googleads.v5.services.MutateKeywordPlanAdGroupResult\"7\n\x1eMutateKeywordPlanAdGroupResult\x12\x15\n\rresource_name\x18\x01 
\x01(\t2\x9d\x04\n\x19KeywordPlanAdGroupService\x12\xdd\x01\n\x15GetKeywordPlanAdGroup\x12>.google.ads.googleads.v5.services.GetKeywordPlanAdGroupRequest\x1a\x35.google.ads.googleads.v5.resources.KeywordPlanAdGroup\"M\x82\xd3\xe4\x93\x02\x37\x12\x35/v5/{resource_name=customers/*/keywordPlanAdGroups/*}\xda\x41\rresource_name\x12\x82\x02\n\x19MutateKeywordPlanAdGroups\x12\x42.google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsRequest\x1a\x43.google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsResponse\"\\\x82\xd3\xe4\x93\x02=\"8/v5/customers/{customer_id=*}/keywordPlanAdGroups:mutate:\x01*\xda\x41\x16\x63ustomer_id,operations\x1a\x1b\xca\x41\x18googleads.googleapis.comB\x85\x02\n$com.google.ads.googleads.v5.servicesB\x1eKeywordPlanAdGroupServiceProtoP\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v5/services;services\xa2\x02\x03GAA\xaa\x02 Google.Ads.GoogleAds.V5.Services\xca\x02 Google\\Ads\\GoogleAds\\V5\\Services\xea\x02$Google::Ads::GoogleAds::V5::Servicesb\x06proto3'
,
dependencies=[google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_keyword__plan__ad__group__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_client__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,])
_GETKEYWORDPLANADGROUPREQUEST = _descriptor.Descriptor(
name='GetKeywordPlanAdGroupRequest',
full_name='google.ads.googleads.v5.services.GetKeywordPlanAdGroupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v5.services.GetKeywordPlanAdGroupRequest.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\002\372A-\n+googleads.googleapis.com/KeywordPlanAdGroup', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=355,
serialized_end=461,
)
_MUTATEKEYWORDPLANADGROUPSREQUEST = _descriptor.Descriptor(
name='MutateKeywordPlanAdGroupsRequest',
full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='customer_id', full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsRequest.customer_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='operations', full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsRequest.operations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='partial_failure', full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsRequest.partial_failure', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='validate_only', full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsRequest.validate_only', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=464,
serialized_end=660,
)
_KEYWORDPLANADGROUPOPERATION = _descriptor.Descriptor(
name='KeywordPlanAdGroupOperation',
full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupOperation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='update_mask', full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupOperation.update_mask', index=0,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='create', full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupOperation.create', index=1,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='update', full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupOperation.update', index=2,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='remove', full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupOperation.remove', index=3,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='operation', full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupOperation.operation',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=663,
serialized_end=918,
)
_MUTATEKEYWORDPLANADGROUPSRESPONSE = _descriptor.Descriptor(
name='MutateKeywordPlanAdGroupsResponse',
full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='partial_failure_error', full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsResponse.partial_failure_error', index=0,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='results', full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsResponse.results', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=921,
serialized_end=1090,
)
_MUTATEKEYWORDPLANADGROUPRESULT = _descriptor.Descriptor(
name='MutateKeywordPlanAdGroupResult',
full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v5.services.MutateKeywordPlanAdGroupResult.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1092,
serialized_end=1147,
)
_MUTATEKEYWORDPLANADGROUPSREQUEST.fields_by_name['operations'].message_type = _KEYWORDPLANADGROUPOPERATION
_KEYWORDPLANADGROUPOPERATION.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
_KEYWORDPLANADGROUPOPERATION.fields_by_name['create'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_keyword__plan__ad__group__pb2._KEYWORDPLANADGROUP
_KEYWORDPLANADGROUPOPERATION.fields_by_name['update'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_keyword__plan__ad__group__pb2._KEYWORDPLANADGROUP
_KEYWORDPLANADGROUPOPERATION.oneofs_by_name['operation'].fields.append(
_KEYWORDPLANADGROUPOPERATION.fields_by_name['create'])
_KEYWORDPLANADGROUPOPERATION.fields_by_name['create'].containing_oneof = _KEYWORDPLANADGROUPOPERATION.oneofs_by_name['operation']
_KEYWORDPLANADGROUPOPERATION.oneofs_by_name['operation'].fields.append(
_KEYWORDPLANADGROUPOPERATION.fields_by_name['update'])
_KEYWORDPLANADGROUPOPERATION.fields_by_name['update'].containing_oneof = _KEYWORDPLANADGROUPOPERATION.oneofs_by_name['operation']
_KEYWORDPLANADGROUPOPERATION.oneofs_by_name['operation'].fields.append(
_KEYWORDPLANADGROUPOPERATION.fields_by_name['remove'])
_KEYWORDPLANADGROUPOPERATION.fields_by_name['remove'].containing_oneof = _KEYWORDPLANADGROUPOPERATION.oneofs_by_name['operation']
_MUTATEKEYWORDPLANADGROUPSRESPONSE.fields_by_name['partial_failure_error'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_MUTATEKEYWORDPLANADGROUPSRESPONSE.fields_by_name['results'].message_type = _MUTATEKEYWORDPLANADGROUPRESULT
DESCRIPTOR.message_types_by_name['GetKeywordPlanAdGroupRequest'] = _GETKEYWORDPLANADGROUPREQUEST
DESCRIPTOR.message_types_by_name['MutateKeywordPlanAdGroupsRequest'] = _MUTATEKEYWORDPLANADGROUPSREQUEST
DESCRIPTOR.message_types_by_name['KeywordPlanAdGroupOperation'] = _KEYWORDPLANADGROUPOPERATION
DESCRIPTOR.message_types_by_name['MutateKeywordPlanAdGroupsResponse'] = _MUTATEKEYWORDPLANADGROUPSRESPONSE
DESCRIPTOR.message_types_by_name['MutateKeywordPlanAdGroupResult'] = _MUTATEKEYWORDPLANADGROUPRESULT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetKeywordPlanAdGroupRequest = _reflection.GeneratedProtocolMessageType('GetKeywordPlanAdGroupRequest', (_message.Message,), {
'DESCRIPTOR' : _GETKEYWORDPLANADGROUPREQUEST,
'__module__' : 'google.ads.googleads_v5.proto.services.keyword_plan_ad_group_service_pb2'
,
'__doc__': """Request message for [KeywordPlanAdGroupService.GetKeywordPlanAdGroup][
google.ads.googleads.v5.services.KeywordPlanAdGroupService.GetKeywordP
lanAdGroup].
Attributes:
resource_name:
Required. The resource name of the Keyword Plan ad group to
fetch.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.GetKeywordPlanAdGroupRequest)
})
_sym_db.RegisterMessage(GetKeywordPlanAdGroupRequest)
MutateKeywordPlanAdGroupsRequest = _reflection.GeneratedProtocolMessageType('MutateKeywordPlanAdGroupsRequest', (_message.Message,), {
'DESCRIPTOR' : _MUTATEKEYWORDPLANADGROUPSREQUEST,
'__module__' : 'google.ads.googleads_v5.proto.services.keyword_plan_ad_group_service_pb2'
,
'__doc__': """Request message for [KeywordPlanAdGroupService.MutateKeywordPlanAdGrou
ps][google.ads.googleads.v5.services.KeywordPlanAdGroupService.MutateK
eywordPlanAdGroups].
Attributes:
customer_id:
Required. The ID of the customer whose Keyword Plan ad groups
are being modified.
operations:
Required. The list of operations to perform on individual
Keyword Plan ad groups.
partial_failure:
If true, successful operations will be carried out and invalid
operations will return errors. If false, all operations will
be carried out in one transaction if and only if they are all
valid. Default is false.
validate_only:
If true, the request is validated but not executed. Only
errors are returned, not results.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsRequest)
})
_sym_db.RegisterMessage(MutateKeywordPlanAdGroupsRequest)
KeywordPlanAdGroupOperation = _reflection.GeneratedProtocolMessageType('KeywordPlanAdGroupOperation', (_message.Message,), {
'DESCRIPTOR' : _KEYWORDPLANADGROUPOPERATION,
'__module__' : 'google.ads.googleads_v5.proto.services.keyword_plan_ad_group_service_pb2'
,
'__doc__': """A single operation (create, update, remove) on a Keyword Plan ad
group.
Attributes:
update_mask:
The FieldMask that determines which resource fields are
modified in an update.
operation:
The mutate operation.
create:
Create operation: No resource name is expected for the new
Keyword Plan ad group.
update:
Update operation: The Keyword Plan ad group is expected to
have a valid resource name.
remove:
Remove operation: A resource name for the removed Keyword Plan
ad group is expected, in this format: ``customers/{customer_i
d}/keywordPlanAdGroups/{kp_ad_group_id}``
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.KeywordPlanAdGroupOperation)
})
_sym_db.RegisterMessage(KeywordPlanAdGroupOperation)
MutateKeywordPlanAdGroupsResponse = _reflection.GeneratedProtocolMessageType('MutateKeywordPlanAdGroupsResponse', (_message.Message,), {
'DESCRIPTOR' : _MUTATEKEYWORDPLANADGROUPSRESPONSE,
'__module__' : 'google.ads.googleads_v5.proto.services.keyword_plan_ad_group_service_pb2'
,
'__doc__': """Response message for a Keyword Plan ad group mutate.
Attributes:
partial_failure_error:
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial\_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results:
All results for the mutate. The order of the results is
determined by the order of the keywords in the original
request.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.MutateKeywordPlanAdGroupsResponse)
})
_sym_db.RegisterMessage(MutateKeywordPlanAdGroupsResponse)
MutateKeywordPlanAdGroupResult = _reflection.GeneratedProtocolMessageType('MutateKeywordPlanAdGroupResult', (_message.Message,), {
'DESCRIPTOR' : _MUTATEKEYWORDPLANADGROUPRESULT,
'__module__' : 'google.ads.googleads_v5.proto.services.keyword_plan_ad_group_service_pb2'
,
'__doc__': """The result for the Keyword Plan ad group mutate.
Attributes:
resource_name:
Returned for successful operations.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.MutateKeywordPlanAdGroupResult)
})
_sym_db.RegisterMessage(MutateKeywordPlanAdGroupResult)
DESCRIPTOR._options = None
_GETKEYWORDPLANADGROUPREQUEST.fields_by_name['resource_name']._options = None
_MUTATEKEYWORDPLANADGROUPSREQUEST.fields_by_name['customer_id']._options = None
_MUTATEKEYWORDPLANADGROUPSREQUEST.fields_by_name['operations']._options = None
_KEYWORDPLANADGROUPSERVICE = _descriptor.ServiceDescriptor(
name='KeywordPlanAdGroupService',
full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupService',
file=DESCRIPTOR,
index=0,
serialized_options=b'\312A\030googleads.googleapis.com',
create_key=_descriptor._internal_create_key,
serialized_start=1150,
serialized_end=1691,
methods=[
_descriptor.MethodDescriptor(
name='GetKeywordPlanAdGroup',
full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupService.GetKeywordPlanAdGroup',
index=0,
containing_service=None,
input_type=_GETKEYWORDPLANADGROUPREQUEST,
output_type=google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_keyword__plan__ad__group__pb2._KEYWORDPLANADGROUP,
serialized_options=b'\202\323\344\223\0027\0225/v5/{resource_name=customers/*/keywordPlanAdGroups/*}\332A\rresource_name',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='MutateKeywordPlanAdGroups',
full_name='google.ads.googleads.v5.services.KeywordPlanAdGroupService.MutateKeywordPlanAdGroups',
index=1,
containing_service=None,
input_type=_MUTATEKEYWORDPLANADGROUPSREQUEST,
output_type=_MUTATEKEYWORDPLANADGROUPSRESPONSE,
serialized_options=b'\202\323\344\223\002=\"8/v5/customers/{customer_id=*}/keywordPlanAdGroups:mutate:\001*\332A\026customer_id,operations',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_KEYWORDPLANADGROUPSERVICE)
DESCRIPTOR.services_by_name['KeywordPlanAdGroupService'] = _KEYWORDPLANADGROUPSERVICE
# @@protoc_insertion_point(module_scope)
| [
"noreply@github.com"
] | fiboknacky.noreply@github.com |
dbf5708023c1cd6e9fa27007bec608a1b0a11915 | 3b786d3854e830a4b46ee55851ca186becbfa650 | /SystemTesting/pylib/vmware/nsx/manager/csr/csr_facade.py | c748ebec7776c2f888198da7e0fcf182ec60e611 | [] | no_license | Cloudxtreme/MyProject | d81f8d38684333c22084b88141b712c78b140777 | 5b55817c050b637e2747084290f6206d2e622938 | refs/heads/master | 2021-05-31T10:26:42.951835 | 2015-12-10T09:57:04 | 2015-12-10T09:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,118 | py | import vmware.common.base_facade as base_facade
import vmware.common.constants as constants
import vmware.common.global_config as global_config
import vmware.nsx.manager.csr.api.csr_api_client as csr_api_client
import vmware.nsx.manager.csr.cli.csr_cli_client as csr_cli_client
import vmware.nsx.manager.csr.csr as csr
pylogger = global_config.pylogger
class CSRFacade(csr.CSR, base_facade.BaseFacade):
    """Facade over the CSR entity exposing CRUDAQ via API and CLI clients."""
    DEFAULT_EXECUTION_TYPE = constants.ExecutionType.API
    DEFAULT_IMPLEMENTATION_VERSION = "NSX70"

    def __init__(self, parent=None, id_=None):
        """Create one client per supported execution type.

        Each client is parented to the corresponding client of ``parent``.
        """
        super(CSRFacade, self).__init__(parent=parent, id_=id_)
        client_classes = (
            (constants.ExecutionType.API, csr_api_client.CSRAPIClient),
            (constants.ExecutionType.CLI, csr_cli_client.CSRCLIClient),
        )
        # Maintain the mapping of execution type -> client object.
        self._clients = {}
        for execution_type, client_class in client_classes:
            self._clients[execution_type] = client_class(
                parent=parent.get_client(execution_type))
| [
"bpei@vmware.com"
] | bpei@vmware.com |
d794b2638f627cae92847c422f07455bd2e63473 | 93cc2b7590433228444a56daf9f6e0991728867e | /backend/courses/serializer.py | b9ecbba5790f4e7f95d7d8826b694ffda60a7f9d | [] | no_license | MisterLenivec/rating_app | b50fb5353634914a914ddf36831d0fa086d04530 | 65111424159fd730a89678386d9e422fddcdcde8 | refs/heads/master | 2022-12-07T07:25:23.748013 | 2020-08-30T12:04:06 | 2020-08-30T12:04:06 | 290,445,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | from rest_framework.serializers import ModelSerializer
from .models import Courses
class CourseSerializer(ModelSerializer):
    """Adding courses (serializer for the Courses model)."""
    class Meta:
        # Expose only the fields the rating frontend needs.
        model = Courses
        fields = ['id', 'name', 'url', 'rating']
| [
"wormsom@gmail.com"
] | wormsom@gmail.com |
766f30af828e5f9361d2718239e0366426beed40 | 46a4ee07809d71700bd72c2fe1b5e3c46568f1b0 | /backend/manage.py | b19ffd4d0d26352e86a016f331dd4a194ecce5c4 | [] | no_license | crowdbotics-apps/audiofocus-5111 | 9e4e396b36b1bc47e8a39f4f8732406dbc489cdf | 815801afc917801d859d50f2986e8535d61e5cfa | refs/heads/master | 2022-12-12T06:03:47.893639 | 2019-06-24T19:23:51 | 2019-06-24T19:23:51 | 193,565,731 | 0 | 0 | null | 2022-12-09T07:34:41 | 2019-06-24T19:23:29 | Python | UTF-8 | Python | false | false | 635 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's administrative command-line utility for this project."""
    # Point Django at this project's settings unless already configured.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'audiofocus_5111.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as import_error:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from import_error
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
98802694fabaaad3e27a3a6069d9a7d0df30f372 | 0c52fefc231db4ace1c483b8a6cfd6f716072c2a | /users/migrations/0003_auto_20200903_1234.py | d2b97f81a0a76faec3e8dfff719d00301bb5ca64 | [] | no_license | BrianC68/fam-recipes | 4161849133fe47bcd589b110e24e3e7e75c80527 | 413a943710ae338c922185aaca0aa46307a3ac18 | refs/heads/master | 2022-12-24T01:50:28.304372 | 2020-10-01T14:17:35 | 2020-10-01T14:17:35 | 295,018,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,406 | py | # Generated by Django 3.0.8 on 2020-09-03 17:34
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.0.8): make CustomUser family links optional.

    Alters the four self-referential many-to-many fields on ``CustomUser``
    (child, cousin, parent, sibling) to allow blank values and to show a
    multi-select hint in forms.  Generated code; normally not hand-edited.
    """

    dependencies = [
        ('users', '0002_auto_20200903_1232'),
    ]

    operations = [
        # A related_name ending in '+' suppresses the reverse accessor
        # (per Django's related_name convention).
        migrations.AlterField(
            model_name='customuser',
            name='child',
            field=models.ManyToManyField(blank=True, help_text='Hold Ctrl + Click to choose multiple children.', related_name='_customuser_child_+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='cousin',
            field=models.ManyToManyField(blank=True, help_text='Hold Ctrl + Click to choose multiple cousins.', related_name='_customuser_cousin_+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='parent',
            field=models.ManyToManyField(blank=True, help_text='Hold Ctrl + Click to choose multiple parents.', related_name='_customuser_parent_+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='customuser',
            name='sibling',
            field=models.ManyToManyField(blank=True, help_text='Hold Ctrl + Click to choose multiple siblings.', related_name='_customuser_sibling_+', to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"brianc@wi.rr.com"
] | brianc@wi.rr.com |
e161d289aef29076b02deb3c136d91069320e6ad | c6a0862b687ff93cb593ba5a35008ebc701fdaa4 | /does_number_look_big.py | 6a2b5305ff4313ed2c34272b28a76c594be54af4 | [] | no_license | tytechortz/codewars | 77deb327cd9d9e8602228ccb246c93a5cec82fe7 | 8517c38e174fac0a8a81a0939f51a6d83ca0e355 | refs/heads/master | 2020-06-03T03:49:31.686202 | 2019-08-26T20:39:50 | 2019-08-26T20:39:50 | 191,425,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | def narcissistic( value ):
digits = [int(x) for x in str(value)]
length = len(digits)
if sum(i**length for i in digits) == value:
return True
else:
return False
narcissistic(153)
| [
"jmswank7@gmail.com"
] | jmswank7@gmail.com |
67bcf6ef75c8622c725316804886b1e3b0041970 | 3ee2b69a81c9193dd34fdf9c587469adb52c7a6e | /contrib/oscoap-plugtest/plugtest-server | c0e380e0e35530809d366bd407d1d8e0c16e723b | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | pruckebusch/aiocoap | cd2ef6da4f1925e8a477baa95cfcb5173465ed21 | 021915635d912137a1d05ec37486ed4432e0f52d | refs/heads/master | 2021-01-22T20:18:18.465840 | 2017-12-20T14:45:41 | 2017-12-20T14:45:41 | 85,305,911 | 0 | 0 | null | 2017-03-17T11:46:17 | 2017-03-17T11:46:17 | null | UTF-8 | Python | false | false | 7,454 | #!/usr/bin/env python3
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak <http://sixpinetrees.blogspot.com/>,
# 2013-2014 Christian Amsüss <c.amsuess@energyharvesting.at>
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""A server suitable for running the OSCOAP plug test series against it
See https://github.com/EricssonResearch/OSCOAP for the test suite
description."""
import sys
import asyncio
import logging
import argparse
import aiocoap
import aiocoap.oscoap as oscoap
import aiocoap.error as error
from aiocoap.util.cli import AsyncCLIDaemon
import aiocoap.resource as resource
from plugtest_common import *
class PleaseUseOscoap(error.ConstructionRenderableError):
    """Unauthorized error telling clients to send the Object-Security option."""
    code = aiocoap.UNAUTHORIZED
    message = "This is an OSCOAP plugtest, please use option %d"%aiocoap.numbers.optionnumbers.OptionNumber.OBJECT_SECURITY
class HelloResource(resource.Resource):
    """GET-only resource registered at /helloworld and /change-tid."""

    def render_get(self, request):
        # Derive the plugtest mode from the request's query options.
        query = request.opt.uri_query
        if query == ('first=1',):
            mode = 2
        elif query == ('second=2',):
            mode = 3
        else:
            mode = 1
        expected_accept = 0 if mode == 3 else None
        additional_verify("Accept as expected", expected_accept, request.opt.accept)
        response_etag = b"\x2b" if mode in (2, 3) else None
        response_max_age = 5 if mode == 3 else None
        return aiocoap.Message(content_format=0, payload="Hello World!".encode('ascii'),
                               etag=response_etag, max_age=response_max_age)
class CounterResource(resource.Resource):
    """Plugtest resource at /counter exercising POST, PUT and DELETE.

    Each handler checks (via ``additional_verify`` from plugtest_common)
    that the unprotected request carries exactly the options and payload
    the test description mandates, then returns the prescribed code.
    """

    def render_post(self, request):
        """Verify a POST carrying payload 0x4a*4; answer Changed with location."""
        # Fixed typo in the runtime verification label ("expeted" -> "expected").
        additional_verify("Content-Format as expected", 0, request.opt.content_format)
        additional_verify("Payload as expected", b"\x4a"*4, request.payload)
        return aiocoap.Message(code=aiocoap.CHANGED, location_path=('counter',), location_query=('first=1', 'second=2'))

    def render_put(self, request):
        """Verify a PUT guarded by If-Match 0x5b5b; answer Changed."""
        additional_verify("Content-Format as expected", 0, request.opt.content_format)
        additional_verify("If-Match as expected", (b"\x5b\x5b",), request.opt.if_match)
        additional_verify("Payload as expected", b"\x5a"*4, request.payload)
        return aiocoap.Message(code=aiocoap.CHANGED)

    def render_delete(self, request):
        """Answer any DELETE with Deleted."""
        return aiocoap.Message(code=aiocoap.DELETED)
class SeqnoManager(resource.Resource):
    """Reports and resets the OSCOAP sequence-number state at /sequence-numbers."""

    def __init__(self, contexts):
        # Shared cid -> security-context mapping owned by the site.
        self.contexts = contexts

    def render_get(self, request):
        """Report the sender sequence number and the receiver replay window."""
        # Direct access into the security context is technically outside its
        # intended interface, but no richer API exists yet.
        (active_context,) = self.contexts.values()
        seen = active_context.other_replay_window.seen
        tail = ", and also %s" % (seen[1:],) if len(seen) > 1 else ""
        text = "Next sequence number I will use: %d\n" % active_context.my_sequence_number
        text += "I've seen all sequence numbers up to including %d%s." % (seen[0], tail)
        return aiocoap.Message(payload=text.encode('utf-8'), content_format=0)

    def render_put(self, request):
        """Re-initialize the recipient security context from a test number."""
        try:
            testno = int(request.payload.decode('utf8'))
        except (UnicodeDecodeError, ValueError):
            raise aiocoap.error.BadRequest("Only numeric values are accepted.")
        replacement = get_security_context(testno, 'recipient')
        self.contexts[replacement.cid] = replacement
        return aiocoap.Message(code=aiocoap.CHANGED)
class PlugtestSite(resource.Site):
    """Resource site for the OSCOAP plugtest server.

    `render` wraps the regular resource dispatch with OSCOAP
    unprotect/protect steps; only the paths in `whitelist` may be
    accessed without protection.

    Fix vs. original: the generic exception handler in `render` bound the
    exception as `rr` but logged `err`, so any rendering error raised a
    NameError instead of being reported.
    """

    def __init__(self, *, contexts=None):
        # `contexts` is accepted for interface compatibility but unused
        # (the original declared it with a mutable default `[]`).
        super().__init__()

        # by now, the testno here should only be used to initialize the sequence numbers
        regular_context = get_security_context(1, 'recipient')
        self.contexts = {c.cid: c for c in [regular_context]}

        self.add_resource(('.well-known', 'core'), resource.WKCResource(self.get_resources_as_linkheader))
        self.add_resource(('change-tid',), HelloResource())
        self.add_resource(('helloworld',), HelloResource())
        self.add_resource(('counter',), CounterResource())
        self.add_resource(('sequence-numbers',), SeqnoManager(self.contexts))

    # Paths that may be served without OSCOAP protection.
    whitelist = (
        ('.well-known', 'core'),
        ('sequence-numbers',),
    )

    # Most of this is copied from server-oscoap, and needs yet to move into the library
    async def render(self, request):
        try:
            cid, sid = oscoap.verify_start(request)
        except oscoap.NotAProtectedMessage:
            if request.opt.uri_path in self.whitelist:
                return await super().render(request)
            else:
                raise PleaseUseOscoap()

        # right now we'll rely on the sid to match, especially as it's not sent
        # unconditionally anyway
        try:
            sc = self.contexts[cid]
        except KeyError:
            raise PleaseUseOscoap() # may we disclose the reason?

        try:
            unprotected, seqno = sc.unprotect(request)
        except oscoap.ProtectionInvalid as e:
            print("Unprotect failed (%s)"%(e,))
            # hack explanation: there is no implementation of a "no response"
            # response in aiocoap yet. the string here is not a sentinel but an
            # exploitation of aiocoap not handling the type error of not having
            # a message early enough to send a 5.00 error instead; it just
            # fails to reply at all.
            # this also bypasses .response_callback (which might
            # auto-increment), which is a good thing because retransmissions
            # would send the auto-incrementor off and away
            return "NO RESPONSE"

        print("Unprotected request:", unprotected)

        if unprotected.opt.uri_path == ('change-tid',):
            # it seems this is the easiest way to tamper with the Tid as
            # requested for test 16.
            seqno = seqno + b'?'

        # FIXME the render doesn't provide a way to provide context in the
        # sense of "who is the user"; obviously, the render interface needs
        # rework
        try:
            response = await super().render(unprotected)
        except error.RenderableError as err:
            response = err.to_message()
        except Exception as err:
            # BUGFIX: was `except Exception as rr:` while logging `err`.
            response = aiocoap.Message(code=aiocoap.INTERNAL_SERVER_ERROR)
            self.log.error("An exception occurred while rendering a protected resource: %r"%err)
            self.log.exception(err)

        if response.code is None:
            # FIXME: this duplicates the default setting in aiocoap.protocol
            response.code = aiocoap.CONTENT

        print("Unprotected response:", response)

        protected_response, _ = sc.protect(response, seqno)
        # FIXME who should trigger this?
        sc._store()
        return protected_response
class PlugtestServerProgram(AsyncCLIDaemon):
    """CLI daemon that serves the plugtest site on the default CoAP context."""

    async def start(self):
        """Parse command line arguments and bring up the server context."""
        logging.root.setLevel(logging.WARNING)
        p = argparse.ArgumentParser(description="Server for the OSCOAP plug test. Requires a test number to be present.")
        # No options are defined; parsing only provides --help and rejects
        # unexpected arguments.
        opts = p.parse_args()
        self.context = await aiocoap.Context.create_server_context(PlugtestSite())
        print("Plugtest server ready.")
        sys.stdout.flush() # the unit tests might wait abundantly long for this otherwise

    async def shutdown(self):
        """Tear the CoAP server context down again on daemon shutdown."""
        await self.context.shutdown()
if __name__ == "__main__":
PlugtestServerProgram.sync_main()
| [
"chrysn@fsfe.org"
] | chrysn@fsfe.org | |
f456161888d1aada746c90888e1e578f7e93d9b4 | fefb1e9b0b736da4e49d7754f8d1dbaf37f2fa6a | /.history/6_1_20210201203845.py | ed5f98f17b9eaa43f22ca8a496997543f53e6c0b | [] | no_license | wh-debug/python | 5a78a2227874ebc400d075197de0adab9f55d187 | 1467eeda670f170e6e2d7c0a0550f713f1ee9d75 | refs/heads/master | 2023-03-12T22:08:12.608882 | 2021-02-17T09:49:52 | 2021-02-17T09:49:52 | 334,032,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | '''
Author: Daylight
Date: 2021-02-01 20:18:57
LastEditTime: 2021-02-01 20:38:45
LastEditors: Please set LastEditors
Description: Practice for dictionaries in python
FilePath: \python\6_1.py
'''
#todo 字典(简单的字典)
alien_0 = {'color': 'green', 'point': 5} #! 保存两个键值
alien_1 = {'colors': 'red'} #! 最简单的字典
print(alien_0['color']) #? 输出字典的某个键值的方法
print(alien_0['point'])
#todo 假设你射杀了一个外星人,将返回你取得的分数(访问字典中的值)
new_points = alien_0['point']
print(f"You just earned {new_points} points!\n")
'''
添加键值对(往字典中添加一个键值对):例子,假如要显示外星人在屏幕中的位置,而开始一般在屏幕的
左上方,需要显示x,y的坐标
'''
alien_0['x_position'] = 0
alien_0['y_position'] = 25
print(alien_0)
| [
"1813763848@qq.com"
] | 1813763848@qq.com |
ad4694defb2d1595a05f17b2c1b017829fcb623f | 70b339d0b2638a7914d0d56c5edf8a2637c9f4b0 | /Facebook-printMergedTwoBST.py | 88d3a8456edc9ece29bc9e44b1cb5338bfaad7e6 | [] | no_license | pflun/advancedAlgorithms | 9991da7514024e18ba08de8688966b9220e12571 | 5520dbcd26999b98e1229bf03c2f62dd690a2ddc | refs/heads/master | 2023-02-19T12:05:26.902535 | 2023-02-14T06:08:54 | 2023-02-14T06:08:54 | 189,055,701 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,313 | py | # -*- coding: utf-8 -*-
# https://www.geeksforgeeks.org/merge-two-bsts-with-limited-extra-space/
# Given two Binary Search Trees(BST), print the elements of both BSTs in sorted
# form. The expected time complexity is O(m+n) where m is the number of nodes in
# first tree and n is the number of nodes in second tree. Maximum allowed
# auxiliary space is O(height of the first tree + height of the second tree).
#
# Examples:
#
# First BST
# 3
# / \
# 1 5
# Second BST
# 4
# / \
# 2 6
# Output: 1 2 3 4 5 6
#
#
# First BST
# 8
# / \
# 2 10
# /
# 1
# Second BST
# 5
# /
# 3
# /
# 0
# Output: 0 1 2 3 5 8 10
from sortedArrayToBST import Solution
class Solution1(object):
    """Merge the inorder traversals of two BSTs into one sorted list.

    Uses one explicit stack per tree (iterative inorder traversal), so
    auxiliary space is O(height1 + height2) rather than O(m + n).

    Bug fix vs. original: `merge` pre-fetched one value from each stack and
    silently dropped whatever was still held in `curr1`/`curr2` when either
    stack ran out (e.g. trees [1] and [2] produced []). Peeking at the
    stack tops instead consumes every node exactly once.
    """

    def __init__(self, root1, root2):
        self.res = []
        self.stack1 = []
        self.stack2 = []
        # Push the leftmost spine of each tree; the stack top is always the
        # next-smallest unvisited node of that tree.
        while root1:
            self.stack1.append(root1)
            root1 = root1.left
        while root2:
            self.stack2.append(root2)
            root2 = root2.left

    def merge(self):
        """Return the merged, sorted values of both trees."""
        while self.stack1 or self.stack2:
            # Take from tree 1 when tree 2 is exhausted or tree 1's next
            # value is not larger; ties prefer tree 1 (stable order).
            take_first = bool(self.stack1) and (
                not self.stack2 or self.stack1[-1].val <= self.stack2[-1].val
            )
            if take_first:
                self.res.append(self.next1())
            else:
                self.res.append(self.next2())
        return self.res

    def next1(self):
        """Pop tree 1's next inorder value; push its right child's left spine."""
        curr = self.stack1.pop()
        tmp = curr.right
        while tmp:
            self.stack1.append(tmp)
            tmp = tmp.left
        return curr.val

    def next2(self):
        """Pop tree 2's next inorder value; push its right child's left spine."""
        curr = self.stack2.pop()
        tmp = curr.right
        while tmp:
            self.stack2.append(tmp)
            tmp = tmp.left
        return curr.val
# Build two balanced BSTs from sorted arrays and print their merged
# inorder traversal.
testBST = Solution()
root1 = testBST.sortedArrayToBST([0, 1, 4, 5, 6, 8])
root2 = testBST.sortedArrayToBST([2, 3, 5, 7, 9, 10])
test = Solution1(root1, root2)
# Use the print() function instead of the Python-2-only statement form
# (`print test.merge()`), so the script also runs under Python 3.
print(test.merge())
"zgao@gwu.edu"
] | zgao@gwu.edu |
3dad4b86386b9c1520d030bd4c33a8aa31878d9e | 32a30dd45236c7c01c971ac128875076305b2a59 | /Interpolation/Composite Variabler.py | 7417f8b9086db1a68f8f2d8a69352f31416ad707 | [
"Apache-2.0"
] | permissive | Mrmohanak/Glyphs-Scripts | 5acb7147e0ca84a1ed15bbfb3e8b1a5fa2b6bd7d | 1293e5649d2f061b044826316b9982420f41ccc5 | refs/heads/master | 2022-11-09T10:28:07.686709 | 2020-06-19T19:11:52 | 2020-06-19T19:11:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,293 | py | #MenuTitle: Composite Variabler
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Reduplicates Brace and Bracket layers of components in the composites in which they are used. Makes brace and bracket layers work in the composites.
"""
import vanilla
class CompoundVariabler(object):
    """Glyphs.app helper: copies bracket ("special") layers from components
    into the composites that use them, so bracket layers also take effect
    in variable-font exports.

    Fixes vs. original:
    * `depthOfNesting` iterated the global macro variable `Font` instead of
      its `thisFont` parameter, and crashed on fonts without components.
    * the layer-deletion loop reused the outer loop variable `i`.
    * `affectedGlyphs` was reset on every nesting iteration, so the final
      report only covered the last iteration.

    NOTE(review): depends on the Glyphs.app scripting environment
    (Glyphs, GSLayer, Message, vanilla) being present at module level.
    """

    def __init__(self):
        """Build the floating settings window and restore saved preferences."""
        # Window 'self.w':
        windowWidth = 405
        windowHeight = 260
        windowWidthResize = 100  # user can resize width by this value
        windowHeightResize = 0   # user can resize height by this value
        self.w = vanilla.FloatingWindow(
            (windowWidth, windowHeight),  # default window size
            "Composite Variabler",  # window title
            minSize=(windowWidth, windowHeight),  # minimum size (for resizing)
            maxSize=(windowWidth + windowWidthResize, windowHeight + windowHeightResize),  # maximum size (for resizing)
            autosaveName="com.mekkablue.CompoundVariabler.mainwindow"  # stores last window position and size
        )

        # UI elements:
        linePos, inset, lineHeight = 12, 15, 22

        self.w.descriptionText = vanilla.TextBox((inset, linePos + 2, -inset, 28), u"Reduplicates Bracket layers of components in the composites in which they are used. Makes bracket layers work in the variable font exports.", sizeStyle='small', selectable=True)
        linePos += lineHeight * 2

        self.w.allGlyphs = vanilla.CheckBox((inset, linePos - 1, -inset, 20), u"Include all exporting glyphs in font (otherwise only selected glyphs)", value=False, callback=self.SavePreferences, sizeStyle='small')
        self.w.allGlyphs.getNSButton().setToolTip_("If checked, all glyphs in the font will be processed and receive the special (brace and bracket) layers of their respective components. If unchecked, only selected composite glyphs get processed.")
        linePos += lineHeight

        self.w.decomposeBrackets = vanilla.CheckBox((inset, linePos - 1, -inset, 20), u"Decompose bracket layers in composites (currently broken)", value=True, callback=self.SavePreferences, sizeStyle='small')
        self.w.decomposeBrackets.getNSButton().setToolTip_("If checked, will decompose bracket layers. This is necessary for bracket layers to work in OTVAR fonts in Glyphs 2.6.")
        linePos += lineHeight

        self.w.deleteExistingSpecialLayers = vanilla.CheckBox((inset, linePos - 1, -inset, 20), u"Delete pre-existing bracket layers in composites", value=False, callback=self.SavePreferences, sizeStyle='small')
        self.w.deleteExistingSpecialLayers.getNSButton().setToolTip_("If checked, will delete all bracket layers found in processed composite glyphs.")
        linePos += lineHeight

        self.w.justBackupInstead = vanilla.CheckBox((inset * 2, linePos - 1, -inset, 20), u"Don’t delete, just backup and deactivate instead", value=False, callback=self.SavePreferences, sizeStyle='small')
        self.w.justBackupInstead.getNSButton().setToolTip_("If checked, will not delete, but just deactivate the layer by renaming it from ‘[100]’ to ‘#100#’.")
        linePos += lineHeight

        self.w.openTab = vanilla.CheckBox((inset, linePos - 1, -inset, 20), u"Open tab with affected composites", value=True, callback=self.SavePreferences, sizeStyle='small')
        self.w.openTab.getNSButton().setToolTip_("If checked, will open a tab with all composites that have received new special layers.")
        linePos += lineHeight

        self.w.catchNestedComponents = vanilla.CheckBox((inset, linePos - 1, -inset, 20), u"Catch all nested components (slower)", value=False, callback=self.SavePreferences, sizeStyle='small')
        self.w.catchNestedComponents.getNSButton().setToolTip_(u"If checked, will count max component depth (number of nestings, i.e. components of components of components, etc.) in the font, and repeat the whole process as many times. Will take significantly longer. Use this only if you need it (unlikely) and know what you are doing.")
        linePos += lineHeight

        self.w.progress = vanilla.ProgressBar((inset, linePos, -inset, 16))
        self.w.progress.set(0)  # set progress indicator to zero
        linePos += lineHeight

        self.w.processedGlyph = vanilla.TextBox((inset, linePos + 2, -80 - inset, 14), u"", sizeStyle='small', selectable=True)
        linePos += lineHeight

        # Run Button:
        self.w.runButton = vanilla.Button((-80 - inset, -20 - inset, -inset, -inset), "Run", sizeStyle='regular', callback=self.CompoundVariablerMain)
        self.w.setDefaultButton(self.w.runButton)

        # Load Settings:
        if not self.LoadPreferences():
            print("Note: 'Composite Variabler' could not load preferences. Will resort to defaults")

        # Open window and focus on it:
        self.w.open()
        self.updateUI()
        self.w.makeKey()

    def updateUI(self, sender=None):
        """Enable the backup option only when deletion is requested."""
        self.w.justBackupInstead.enable(self.w.deleteExistingSpecialLayers.get())

    def SavePreferences(self, sender):
        """Persist the current UI state into the Glyphs defaults. Returns success."""
        try:
            Glyphs.defaults["com.mekkablue.CompoundVariabler.allGlyphs"] = self.w.allGlyphs.get()
            Glyphs.defaults["com.mekkablue.CompoundVariabler.openTab"] = self.w.openTab.get()
            Glyphs.defaults["com.mekkablue.CompoundVariabler.deleteExistingSpecialLayers"] = self.w.deleteExistingSpecialLayers.get()
            Glyphs.defaults["com.mekkablue.CompoundVariabler.decomposeBrackets"] = self.w.decomposeBrackets.get()
            Glyphs.defaults["com.mekkablue.CompoundVariabler.catchNestedComponents"] = self.w.catchNestedComponents.get()
            Glyphs.defaults["com.mekkablue.CompoundVariabler.justBackupInstead"] = self.w.justBackupInstead.get()
            self.updateUI()
        except:
            return False
        return True

    def LoadPreferences(self):
        """Restore the UI state from the Glyphs defaults. Returns success."""
        try:
            Glyphs.registerDefault("com.mekkablue.CompoundVariabler.allGlyphs", 1)
            Glyphs.registerDefault("com.mekkablue.CompoundVariabler.openTab", 0)
            Glyphs.registerDefault("com.mekkablue.CompoundVariabler.deleteExistingSpecialLayers", 1)
            Glyphs.registerDefault("com.mekkablue.CompoundVariabler.decomposeBrackets", 1)
            Glyphs.registerDefault("com.mekkablue.CompoundVariabler.catchNestedComponents", 0)
            Glyphs.registerDefault("com.mekkablue.CompoundVariabler.justBackupInstead", 1)
            self.w.allGlyphs.set(Glyphs.defaults["com.mekkablue.CompoundVariabler.allGlyphs"])
            self.w.openTab.set(Glyphs.defaults["com.mekkablue.CompoundVariabler.openTab"])
            self.w.deleteExistingSpecialLayers.set(Glyphs.defaults["com.mekkablue.CompoundVariabler.deleteExistingSpecialLayers"])
            self.w.decomposeBrackets.set(Glyphs.defaults["com.mekkablue.CompoundVariabler.decomposeBrackets"])
            self.w.catchNestedComponents.set(Glyphs.defaults["com.mekkablue.CompoundVariabler.catchNestedComponents"])
            self.w.justBackupInstead.set(Glyphs.defaults["com.mekkablue.CompoundVariabler.justBackupInstead"])
            self.updateUI()
        except:
            return False
        return True

    def countNest(self, c):
        """Return 1 + the maximum nesting depth below component `c`."""
        # NOTE(review): walks parent->parent->parent to reach the font;
        # assumes component -> layer -> glyph -> font ownership chain.
        thisFont = c.parent.parent.parent
        if thisFont:
            gName = c.componentName
            g = thisFont.glyphs[gName]
            if g:
                gComponents = g.layers[0].components
                if gComponents:
                    maxCount = max(self.countNest(cc) for cc in gComponents)
                    return 1 + maxCount
        return 1

    def depthOfNesting(self, thisFont):
        """Return the maximum component nesting depth in `thisFont` (0 if none)."""
        depths = []
        # BUGFIX: was iterating the global `Font` instead of the parameter.
        for g in thisFont.glyphs:
            for l in g.layers:
                if l.isMasterLayer or l.isSpecialLayer or l.isColorLayer:
                    for c in l.components:
                        depths.append(self.countNest(c))
        # BUGFIX: max() of an empty list raised ValueError for fonts
        # without any components.
        return max(depths) if depths else 0

    def CompoundVariablerMain(self, sender):
        """Run the layer reduplication, driven by the current UI settings."""
        try:
            # clear macro window log:
            Glyphs.clearLog()

            # update settings to the latest user input:
            if not self.SavePreferences(self):
                print("Note: 'Composite Variabler' could not write preferences.")

            allGlyphs = Glyphs.defaults["com.mekkablue.CompoundVariabler.allGlyphs"]
            openTab = Glyphs.defaults["com.mekkablue.CompoundVariabler.openTab"]
            deleteExistingSpecialLayers = Glyphs.defaults["com.mekkablue.CompoundVariabler.deleteExistingSpecialLayers"]
            catchNestedComponents = Glyphs.defaults["com.mekkablue.CompoundVariabler.catchNestedComponents"]
            decomposeBrackets = Glyphs.defaults["com.mekkablue.CompoundVariabler.decomposeBrackets"]
            justBackupInstead = Glyphs.defaults["com.mekkablue.CompoundVariabler.justBackupInstead"]

            thisFont = Glyphs.font  # frontmost font
            affectedGlyphs = []  # collected across ALL nesting iterations
            if thisFont is None:
                Message(title="No Font Open", message="The script requires a font. Open a font and run the script again.", OKButton=None)
                return
            else:
                print("Composite Variabler Report for %s" % thisFont.familyName)
                if thisFont.filepath:
                    print(thisFont.filepath)
                else:
                    print("⚠️ The font file has not been saved yet.")
                print()

                depth = 1
                if catchNestedComponents:
                    print("Catching all component nestings...")
                    depth = self.depthOfNesting(thisFont)
                    depth = max(1, depth)  # minimum 1, just to make sure
                    print(
                        "Found components nested up to %i time%s" % (
                            depth,
                            "" if depth == 1 else "s",
                        )
                    )

                if allGlyphs:
                    glyphs = [g for g in thisFont.glyphs if g.export]
                    print("Processing all glyphs (%i in total)..." % len(glyphs))
                else:
                    glyphs = set([l.parent for l in thisFont.selectedLayers if l.parent.export])
                    print("Processing selected glyphs (%i in total)..." % len(glyphs))

                for depthIteration in range(depth):
                    depthStatusAddition = ""
                    if depth > 1:
                        print("\nNesting iteration %i:" % (depthIteration + 1))
                        depthStatusAddition = "%i: " % (depthIteration + 1)

                    glyphCount = len(glyphs)
                    layersToDecompose = []
                    for glyphIndex, currentGlyph in enumerate(glyphs):
                        # status update
                        self.w.progress.set(glyphIndex * 100.0 / glyphCount)
                        processMessage = "%s%s" % (depthStatusAddition, currentGlyph.name)
                        self.w.processedGlyph.set(processMessage)

                        # process layers
                        thisLayer = currentGlyph.layers[0]
                        if thisLayer.components and not thisLayer.paths:  # pure composites only

                            # delete special layers if requested:
                            if deleteExistingSpecialLayers:
                                layerCount = len(currentGlyph.layers)
                                # walk backwards so deletions do not shift the
                                # indexes still to visit (BUGFIX: the original
                                # reused the outer loop variable `i` here):
                                for layerIndex in reversed(range(layerCount)):
                                    thatLayer = currentGlyph.layers[layerIndex]
                                    if thatLayer.isSpecialLayer and "[" in thatLayer.name and "]" in thatLayer.name:
                                        if justBackupInstead:
                                            for bracket in "[]":
                                                thatLayer.name = thatLayer.name.replace(bracket, "#")
                                            print("%s: backed up layer: '%s'" % (currentGlyph.name, thatLayer.name))
                                        else:
                                            print("%s: deleted layer '%s'" % (currentGlyph.name, thatLayer.name))
                                            del currentGlyph.layers[layerIndex]

                            for component in thisLayer.components:
                                originalGlyph = thisFont.glyphs[component.componentName]
                                if originalGlyph and not originalGlyph.isSmartGlyph():
                                    for originalLayer in originalGlyph.layers:
                                        if originalLayer.isSpecialLayer and "[" in originalLayer.name and "]" in originalLayer.name:
                                            # only add the layer if the same
                                            # name/master combination is not
                                            # present yet:
                                            layerAlreadyExists = False
                                            for currentGlyphLayer in currentGlyph.layers:
                                                nameIsTheSame = originalLayer.name == currentGlyphLayer.name
                                                masterIsTheSame = originalLayer.associatedMasterId == currentGlyphLayer.associatedMasterId
                                                if nameIsTheSame and masterIsTheSame:
                                                    layerAlreadyExists = True

                                            if layerAlreadyExists:
                                                print("%s, layer '%s' already exists. Skipping." % (currentGlyph.name, originalLayer.name))
                                            else:
                                                newLayer = GSLayer()
                                                newLayer.name = originalLayer.name
                                                newLayer.setAssociatedMasterId_(originalLayer.associatedMasterId)
                                                print(newLayer.associatedMasterId, originalLayer.associatedMasterId)
                                                newLayer.width = originalLayer.width
                                                currentGlyph.layers.append(newLayer)
                                                newLayer.reinterpolate()
                                                newLayer.reinterpolateMetrics()
                                                affectedGlyphs.append(currentGlyph.name)
                                                print("%s, new layer: '%s'%s" % (
                                                    currentGlyph.name,
                                                    newLayer.name,
                                                    " (decomposed)" if decomposeBrackets else "",
                                                ))
                                                if decomposeBrackets:
                                                    layersToDecompose.append(newLayer)

                    # decompose (must happen after all reinterpolations are done):
                    for bracketLayer in layersToDecompose:
                        for component in bracketLayer.components:
                            component.decompose()

            # status update
            self.w.progress.set(100)
            self.w.processedGlyph.set("Done.")
            print("Done.")

            if affectedGlyphs:
                if openTab:
                    # opens new Edit tab:
                    tabText = "/" + "/".join(set(affectedGlyphs))
                    thisFont.newTab(tabText)
                # Floating notification:
                numOfGlyphs = len(set(affectedGlyphs))
                Glyphs.showNotification(
                    u"%s" % (thisFont.familyName),
                    u"Composite Variabler added layers to %i composite glyph%s. Details in Macro Window." % (
                        numOfGlyphs,
                        "" if numOfGlyphs == 1 else "s",
                    ),
                )
            else:
                # Floating notification:
                Glyphs.showNotification(
                    u"%s" % (thisFont.familyName),
                    u"Composite Variabler added no new layers. Details in Macro Window.",
                )

        except Exception as e:
            # brings macro window to front and reports error:
            Glyphs.showMacroWindow()
            print("Composite Variabler Error: %s" % e)
            import traceback
            print(traceback.format_exc())
"res@glyphsapp.com"
] | res@glyphsapp.com |
962b85fe59681509f7a23e5cb92b8b9d64a6ab91 | 118d13a5569092e1742151bcee449a8ed1dff164 | /ms_graph/drive_items.py | 343881b983143609331aebb13cc6076d71c1aaa8 | [
"MIT"
] | permissive | tarikap/ms-graph-python-client | 08cc9da20e49ba74ded2b6cd8afcdf2574c5e609 | 64a2df6f6c1bfcd89a78a500fb4bd04b560a5232 | refs/heads/master | 2023-02-09T18:22:46.394673 | 2020-12-06T11:15:53 | 2020-12-06T11:15:53 | 319,012,621 | 0 | 0 | null | 2020-12-06T11:05:30 | 2020-12-06T11:05:30 | null | UTF-8 | Python | false | false | 8,887 | py | from typing import Dict
from ms_graph.session import GraphSession
class DriveItems():
    """
    ## Overview:
    ----
    The driveItem resource represents a file, folder,
    or other item stored in a drive. All file system
    objects in OneDrive and SharePoint are returned as
    driveItem resources.

    Fix vs. original: `collections_endpoint` already ends in '/', but the
    drive-scoped methods prefixed their path with another '/', producing
    'drives//{drive-id}/...' (a double slash) in the request URL.
    """

    def __init__(self, session: object) -> None:
        """Initializes the `DriveItems` object.

        ### Parameters
        ----
        session : object
            An authenticated session for our Microsoft Graph Client
            (a `GraphSession` instance).
        """
        # Set the session.
        self.graph_session = session

        # Set the endpoints.
        self.endpoint = 'drive'
        self.collections_endpoint = 'drives/'

    def _get(self, endpoint: str) -> Dict:
        """Issue a GET request against `endpoint` and return the response content."""
        return self.graph_session.make_request(method='get', endpoint=endpoint)

    def get_drive_item(self, drive_id: str, item_id: str) -> Dict:
        """Grab a DriveItem resource by Drive ID and Item ID.

        ### Returns
        ----
        Dict: A DriveItem resource object.
        """
        # No leading slash: collections_endpoint already ends in '/'.
        return self._get(self.collections_endpoint + f"{drive_id}/items/{item_id}")

    def get_drive_item_by_path(self, drive_id: str, item_path: str) -> Dict:
        """Grab a DriveItem resource by Drive ID and item path."""
        return self._get(self.collections_endpoint + f"{drive_id}/root:/{item_path}")

    def get_group_drive_item(self, group_id: str, item_id: str) -> Dict:
        """Grab a DriveItem resource from a group's drive by Item ID."""
        return self._get(f"/groups/{group_id}/drive/items/{item_id}")

    def get_group_drive_item_by_path(self, group_id: str, item_path: str) -> Dict:
        """Grab a DriveItem resource from a group's drive by item path."""
        return self._get(f"/groups/{group_id}/drive/root:/{item_path}")

    def get_my_drive_item(self, item_id: str) -> Dict:
        """Grab a DriveItem resource from the signed-in user's drive by Item ID."""
        return self._get(f"/me/drive/items/{item_id}")

    def get_my_drive_item_by_path(self, item_path: str) -> Dict:
        """Grab a DriveItem resource from the signed-in user's drive by item path."""
        return self._get(f"/me/drive/root:/{item_path}")

    def get_site_drive_item(self, site_id: str, item_id: str) -> Dict:
        """Grab a DriveItem resource from a site's drive by Item ID."""
        return self._get(f"/sites/{site_id}/drive/items/{item_id}")

    def get_site_drive_item_by_path(self, site_id: str, item_path: str) -> Dict:
        """Grab a DriveItem resource from a site's drive by item path."""
        return self._get(f"/sites/{site_id}/drive/root:/{item_path}")

    def get_site_drive_item_from_list(self, site_id: str, list_id: str, item_id: str) -> Dict:
        """Grab the DriveItem behind a SharePoint list item."""
        return self._get(f"/sites/{site_id}/lists/{list_id}/items/{item_id}/driveItem")

    def get_user_drive_item(self, user_id: str, item_id: str) -> Dict:
        """Grab a DriveItem resource from a user's drive by Item ID."""
        return self._get(f"/users/{user_id}/drive/items/{item_id}")

    def get_user_drive_item_by_path(self, user_id: str, item_path: str) -> Dict:
        """Grab a DriveItem resource from a user's drive by item path."""
        return self._get(f"/users/{user_id}/drive/root:/{item_path}")


# Endpoint patterns covered above, for reference:
# GET /drives/{drive-id}/items/{item-id}
# GET /drives/{drive-id}/root:/{item-path}
# GET /groups/{group-id}/drive/items/{item-id}
# GET /groups/{group-id}/drive/root:/{item-path}
# GET /me/drive/items/{item-id}
# GET /me/drive/root:/{item-path}
# GET /sites/{site-id}/drive/items/{item-id}
# GET /sites/{site-id}/drive/root:/{item-path}
# GET /sites/{site-id}/lists/{list-id}/items/{item-id}/driveItem
# GET /users/{user-id}/drive/items/{item-id}
# GET /users/{user-id}/drive/root:/{item-path}
| [
"alexreed1192@gmail.com"
] | alexreed1192@gmail.com |
59549cf9b5090c462054501984f18528b5eeecb8 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2563/60771/286481.py | 6440860658ef460e394d3ddb3ec560656c52722f | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | #08
s = eval(input())
if s == "1000000000000000000":
print("999999999999999999")
exit(0)
ones = ["1","11","111","1111","11111"]
for i in range(2,11):
for item in ones:
if int(s) == int(item,i):
print(i)
exit(0)
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
d22116543afbd061fe73aff46c090483128e53a9 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startCirq220.py | ec7bc93bbdf4174fbf1914c1bd69aa6b569cdc19 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=9
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the fixed benchmark circuit on the given qubits and append a
    measurement of all of them under the key 'result'.

    The gate list is auto-generated (see the `# number=` markers); `n` is
    not used inside the function beyond documenting the qubit count.
    """
    c = cirq.Circuit()  # circuit begin

    c.append(cirq.H.on(input_qubit[0])) # number=1
    c.append(cirq.Z.on(input_qubit[3])) # number=7
    c.append(cirq.Z.on(input_qubit[1])) # number=8
    c.append(cirq.H.on(input_qubit[2])) # number=3
    c.append(cirq.H.on(input_qubit[3])) # number=4
    # The two identical SWAPs cancel each other out; kept as generated.
    c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=5
    c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=6
    # circuit end

    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Render a sequence of bit-like values as a string of digits."""
    digits = [str(int(bit)) for bit in bits]
    return ''.join(digits)
if __name__ == '__main__':
    # Build the benchmark circuit on 4 grid qubits.
    qubit_count = 4

    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # Optimize for the Sycamore device using the sqrt-iSWAP target gate set.
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count =2000

    # Simulate, histogram the measurement bitstrings, and dump the results.
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    frequencies = result.histogram(key='result', fold_func=bitstring)
    # NOTE(review): relative output path; assumes a ../data directory exists.
    writefile = open("../data/startCirq220.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)
    writefile.close()
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
9e08d4193970889aeba26408618a9dbea26434fb | 7869d3413fd13a98fd9bc53186418be4885b773a | /function/dataprocess.py | 0751d8cfb887d9adc24ec779d792d1e32ffde976 | [] | no_license | abandonsea/RSSAN-Hyperspectral-Image | ca5e4fdebc20a5583b0af193798dc8e7d8a1b353 | a6b9552dc52cfbb81f3060689bd23b86613c93a5 | refs/heads/main | 2023-07-11T07:45:32.902912 | 2021-08-19T07:42:04 | 2021-08-19T07:42:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,836 | py | import os
import sys
import torch.utils.data
import scipy.io as sio
from sklearn.model_selection import train_test_split
import numpy as np
import torch
from HSI_torch import CommonDataset
def normalize(data):
    """Min-max normalize each slice along axis 0 of an HSI cube to [0, 1].

    Returns a float copy of `data`; the input array is not modified.

    Fixes vs. original:
    * `np.float` was deprecated in NumPy 1.20 and removed in 1.24;
      the builtin `float` is the documented replacement.
    * a constant slice no longer triggers a division by zero
      (it is left as all zeros after the min shift).
    """
    data = data.astype(float)
    for i in range(len(data)):
        data[i, :, :] -= data[i, :, :].min()
        peak = data[i, :, :].max()
        if peak > 0:
            data[i, :, :] /= peak
    return data
def random_unison(a, b, rstate=None):
    """Shuffle `a` and `b` with the same random permutation.

    Both arrays must have equal length; `rstate` seeds the permutation
    so results are reproducible.
    """
    assert len(a) == len(b)
    rng = np.random.RandomState(seed=rstate)
    order = rng.permutation(len(a))
    return a[order], b[order]
def split_data(pixels, labels, train_list=None, percent=0.1, splitdset="custom", rand_state=77):
    """Split patch cubes into stratified train/test sets.

    ### Parameters
    ----
    pixels : np.ndarray, shape (N, H, W, B) patch cubes
    labels : np.ndarray, shape (N,) class labels (assumed 0..K-1, since
        per-class counts are indexed by int(label))
    train_list : list[int] | None
        per-class training counts, used only for splitdset="self_spilt"
    percent : float
        per-class training fraction, used only for "custom"/"sklearn"
    splitdset : str
        "sklearn"    -- delegate to sklearn's stratified train_test_split
        "custom"     -- take ceil(percent * class size) samples per class
        "self_spilt" -- take train_list[class] samples per class
        (spelling kept for interface compatibility)
    rand_state : int seed for the shuffles

    ### Returns
    ----
    (train_X, test_X, train_Y, test_Y)
    """
    if splitdset == "sklearn":
        return train_test_split(pixels, labels, test_size=(1 - percent), stratify=labels, random_state=rand_state)
    if splitdset == "custom":
        # ceil() so every class contributes at least one training sample.
        _, class_sizes = np.unique(labels, return_counts=True)
        counts = [int(np.ceil(size * percent)) for size in class_sizes]
        return _stratified_split(pixels, labels, counts, rand_state)
    if splitdset == "self_spilt":
        return _stratified_split(pixels, labels, train_list, rand_state)
    raise ValueError("unknown splitdset: %r" % (splitdset,))


def _stratified_split(pixels, labels, per_class_counts, rand_state):
    """Take per_class_counts[c] shuffled samples of class c for training,
    the rest for testing; the training set is shuffled once more at the end.

    (The original duplicated this whole loop in both the "custom" and
    "self_spilt" branches.)
    """
    train_total = sum(per_class_counts)
    test_total = labels.shape[0] - train_total
    train_X = np.zeros((train_total, pixels.shape[1], pixels.shape[2], pixels.shape[3]))
    train_Y = np.zeros(train_total)
    test_X = np.zeros((test_total, pixels.shape[1], pixels.shape[2], pixels.shape[3]))
    test_Y = np.zeros(test_total)

    tr_idx = 0
    te_idx = 0
    for cl in np.unique(labels):
        cls_pixels = pixels[labels == cl]
        cls_labels = labels[labels == cl]
        # Deterministic per-class shuffle (same scheme as random_unison).
        order = np.random.RandomState(seed=rand_state).permutation(len(cls_pixels))
        cls_pixels, cls_labels = cls_pixels[order], cls_labels[order]
        quota = per_class_counts[int(cl)]
        for cont, (px, lb) in enumerate(zip(cls_pixels, cls_labels)):
            if cont < quota:
                train_X[tr_idx] = px
                train_Y[tr_idx] = lb
                tr_idx += 1
            else:
                test_X[te_idx] = px
                test_Y[te_idx] = lb
                te_idx += 1

    # Final shuffle so training classes are interleaved.
    order = np.random.RandomState(seed=rand_state).permutation(len(train_X))
    return train_X[order], test_X, train_Y[order], test_Y
def loaddata(names, datapath):
"""
数据集加载
:param names: 数据集名称 | IN PU SA KSC
:param datapath: 数据集存放目录
:return:data,labels
"""
data_path = os.path.join(datapath) # 数据集地址
if names == 'IN':
data = sio.loadmat(os.path.join(data_path, 'Indian_pines_corrected'))['indian_pines_corrected']
labels = sio.loadmat(os.path.join(data_path, 'Indian_pines_gt.mat'))['indian_pines_gt']
elif names == 'PU':
data = sio.loadmat(os.path.join(data_path, 'PaviaU.mat'))['paviaU']
labels = sio.loadmat(os.path.join(data_path, 'PaviaU_gt.mat'))['paviaU_gt']
elif names == 'SA':
data = sio.loadmat(os.path.join(data_path, 'Salinas_corrected.mat'))['salinas_corrected']
labels = sio.loadmat(os.path.join(data_path, 'Salinas_gt.mat'))['salinas_gt']
elif names == 'KSC':
data = sio.loadmat(os.path.join(data_path, 'KSC.mat'))['KSC']
labels = sio.loadmat(os.path.join(data_path, 'KSC_gt.mat'))['KSC_gt']
else:
print("NO DATASET")
sys.exit()
return data, labels
def pad_zero(data, window_size):
margin = int((window_size - 1) / 2)
return np.pad(data, ((margin, margin), (margin, margin), (0, 0)))
def CreatimageCube(tot_x, tot_y, Windows_Size, removeZeroLabels=True): # tot train or test
"""
:param tot_x:train\test\val
:param tot_y:train\test\val label
:param Windows_Size:patch_size
:param removeZeroLabels: Remove 0 tag, Recommended. if not, it will not
be usable because of low performance and memory overflow
:return: patches_x[index, row, col, bands], patches_y[index], kinds, Bands
"""
margin = int((Windows_Size - 1) / 2)
Bands = tot_x.shape[2]
kinds = np.unique(tot_y).shape[0] - 1 # 得到测试或者训练集中的种类数
paddeddata = pad_zero(tot_x, Windows_Size)
labelnum = np.sum(tot_y > 0)
if removeZeroLabels:
patches_x = np.zeros((labelnum, Windows_Size, Windows_Size, Bands))
patches_y = np.zeros(labelnum)
patches_index = 0
for row in range(margin, paddeddata.shape[0] - margin):
for col in range(margin, paddeddata.shape[1] - margin):
if tot_y[row - margin, col - margin] != 0:
patch = paddeddata[row - margin:row + margin + 1, col - margin:col + margin + 1, :]
patches_x[patches_index, :, :, :] = patch
patches_y[patches_index] = tot_y[row - margin, col - margin] - 1
patches_index += 1
del paddeddata
return patches_x, patches_y, kinds, Bands
else:
patches_x = np.zeros((tot_x.shape[0] * tot_x.shape[1], Windows_Size, Windows_Size, Bands))
patches_y = np.zeros(tot_x.shape[0] * tot_x.shape[1])
patches_index = 0
for row in range(margin, paddeddata.shape[0] - margin):
for col in range(margin, paddeddata.shape[0] - margin):
patch = paddeddata[row - margin:row + margin + 1, col - margin:col + margin + 1, :]
patches_x[patches_index, :, :, :] = patch
patches_y[patches_index] = tot_y[row - margin, col - margin] - 1
patches_index += 1
del paddeddata
return patches_x, patches_y, kinds, Bands
def load_dataset(path, name, batch_size, window_size, test_batch):
"""
加载数据集
:param batch_size: batch_size
:param path: 数据集根目录
:param name: 数据名 IN,PU,KSC,SA
:param test_batch: test_size
:param window_size: path_size
:return: train_loader, test_loader, val_loader, kinds, bands
"""
x, y = loaddata(name, path)
x = normalize(x) # 归一化
train_x, train_y, kinds, bands = CreatimageCube(x, y, window_size, removeZeroLabels=True)
train_x, test_x, train_y, test_y = split_data(train_x, train_y, percent=0.2, splitdset="custom", rand_state=77)
val_x, test_x, val_y, test_y = split_data(test_x, test_y, percent=0.125, splitdset="custom", rand_state=77)
train_hyper = CommonDataset((np.transpose(train_x, (0, 3, 1, 2)).astype("float32"), train_y))
test_hyper = CommonDataset((np.transpose(test_x, (0, 3, 1, 2)).astype("float32"), test_y))
val_hyper = CommonDataset((np.transpose(val_x, (0, 3, 1, 2)).astype("float32"), val_y))
train_loader = torch.utils.data.DataLoader(train_hyper, batch_size=batch_size, shuffle=True)
test_loader = torch.utils.data.DataLoader(test_hyper, batch_size=test_batch, shuffle=False)
val_loader = torch.utils.data.DataLoader(val_hyper, batch_size=batch_size, shuffle=True)
del test_hyper, val_hyper, train_x, test_x, train_y, test_y, val_x, val_y
return train_loader, test_loader, val_loader, kinds, bands, train_hyper.data.shape[1:]
# if __name__ == '__main__':
# path = r'F:\Residual Spectral–Spatial Attention Network for\Dataset'
# x, y = loaddata('IP', path)
# # train_num = [14, 419, 265, 69, 149, 219, 9, 144, 5, 288, 733, 175, 65, 376, 119, 31]
# # val_num = [4, 133, 99, 21, 52, 73, 3, 48, 1, 93, 242, 56, 24, 123, 41, 12]
# x = normalize(x) # 归一化
# train_x, train_y, kind, Band = CreatimageCube(x, y, 8, 17, removeZeroLabels=True)
#
# train_x, test_x, train_y, test_y = split_data(train_x, train_y, percent=0.2, splitdset="custom", rand_state=77)
# val_x, test_x, val_y, test_y = split_data(test_x, test_y, percent=0.125, splitdset="custom", rand_state=77)
| [
"noreply@github.com"
] | abandonsea.noreply@github.com |
11e929075f1dc33ce20b126bede66b9911fa154b | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5686275109552128_1/Python/LooneyLarry/solution.py | 48f0fd31d05cb88218678b13fa6205aae48076c4 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 4,246 | py | # Google Code Jam 2015 Qualifying B.
# Moving half of stack isn't optimal, right?
# Start: 9, make 4,5, total cost: 6
# make 3,3,3, total cost: 5
# Right, halving isn't optimal.
# Can't improve on 3.
# 1: 1
# 2: 2
# 3: 3
# 4: 3
# 5: 4
# 6: 1 + C(3) = 4
# 7?: 1 + C(4) = 4<= No, we can't split the 3 and the 4 stack simultaneously.
# 7: split*2 eat*3 = 5
# Is moving off piles of 3 optimal? (Not for 4)
# 8: split*2 eat*3 = 5
# 9: split*2 eat*3 = 5
# 10: split*3 eat*3 = 6 (or split + eat*5)
# 11: split*3 eat*3 = 6
# 12: split*3 eat*3 = 6
# 13: split*4 eat*3 = 7
# n: ceil(n/3) - 1 + 3 = ceil(n/3) + 2 Doesn't work for 4.
# I'm thinking 1, 2, and 4 are special cases and everything else
# follows this, but I'm unsure. There's only a 4 minute penalty for
# failed small input attempts.
# But input is multiple plates, not 1. We can split only one at a time.
# 4 4: split*2 eat*2 = 4
# 6 6: split*2 eat*3 = 5
# sum(ceil(nsubi/3)-1) + 3
# Special case if max is 4 and no 3.
# So, break everything into stacks of 3 (or 2 when 4), then eat
# 3 (or 2 if max was 4 and no 3). Can split one plate at a time
# ignoring all others (except if max stack is 4 or less and no 3).
# Store numToEat -- size of largest stack when done splitting.
# Increase to stacksize for stack of 1 2 or 3, increase to 3 for
# stack > 4, increase to 2 for stack of 4. (But only if only one 4.)
# For each plate, count splits required and adjust numToEat to {0..3}.
# Return number of splits plus numToEat.
#
# No, ceil(n/3) + 2 is wrong for 16. That comes to 8, but split*3 to
# make stacks of 4 followed by eat*4 = 7. Because breaking 16 into
# threes is inefficient.
# what about 18? Rule says 8. Can do that with stacks of 3 or 4 or 6.
# Rule is correct (because multiple of 3).
# What about 20? Rule says 9. Stacks of 4 or 5 cost 8.
# So stacks of sqrt(n) is optimal?
# 100? split*9 eat*10 = 19, rule says 36.
# 101? I suppose split*10 eat*10 = 20. Or split*9 eat*11. Or split*8
# eat*12. Or 7,13. Or 11,9. Or 12,8. But not 13,7. Because 14*7 <
# 101. 7,13 works because 8*13 >= 101. (That's (split+1)*eat.)
# sqrt makes sense since we want to minimize split+eat for maximum
# split*eat.
# So the maximum initial stack determines the target stack size, and
# all other stacks need to be split into that size. Does that work?
# If max is 100 so target stack is 10 but 999 other plates start
# at 11, 10 loses. So this is impossible without something like
# dynamic programming.
# Wait, there are only 1000 plates. I could try all stack sizes up to
# 1000 and see which wins. Maybe calculate cost for sqrt(max()) to
# have an upper bound for pruning. Hey, sqrt(max()) is the minimum
# target stack, we don't need to consider anything smaller. So if a
# stack starts at 1000, we try stack sizes 33ish to 1000. There could
# be 999 stacks of 999, so we can't stop earlier. But if the number
# of remaining stacks larger than i is low we could quit? Nevermind,
# this should be fast enough.
#
# So. Start target stacksize at sqrt(max()) - 1. (Nevermind.)
# Iterate plates counting splits needed to reach target. Cost of
# target is splits plus target (eating). Repeat incrementing target
# up to max.
import sys
# How many splits does it take to convert num pancakes into stacks
# not taller than target?
def countSplits(num, target):
if num <= 1:
return 0
# We do (9,3) by removing 3 twice (making 3 stacks of 3).
return (num - 1) / target
def doCase(file):
file.readline() # Ignore number of plates
plates = map(int, file.readline().split())
bestCost = 1000
for targetStack in range(1, 1001):
cost = targetStack # cost of eating stack after splitting
for plate in plates:
cost += countSplits(plate, targetStack) # cost of splitting
bestCost = min(bestCost, cost)
#print "Target {0}, cost {1}".format(targetStack, cost)
return bestCost
def run():
file = open(sys.argv[1])
numCases = int(file.readline())
for case in range(1, numCases+1):
answer = doCase(file)
print 'Case #{0}: {1}'.format(case, answer)
run()
| [
"root@debian"
] | root@debian |
77c43b89e4d98e184496a139bfe0ee501aac3077 | 4da462f01398e57f07532d09becbcb737278be6b | /tape/disasm/disasm.py | f1c16dedb37c92e200cfe9e509160dcdabef7389 | [
"BSD-2-Clause"
] | permissive | meawoppl/purgatory | d740bc30e547b6d8ef1e00353747ffae6701881f | 58abd57f7bf26457798f1d518c321ee52396fb3d | refs/heads/master | 2021-01-13T01:31:20.943177 | 2015-07-26T21:09:42 | 2015-07-26T21:09:42 | 37,833,544 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,271 | py | from __future__ import print_function
# test1.py
from capstone import Cs, CS_ARCH_X86, CS_MODE_64, CS_MODE_32
CODE = b"\x8d\x44\x38\x02"
md = Cs(CS_ARCH_X86, CS_MODE_32)
md.detail = True
for i in md.disasm(CODE, 0):
# print(dir(i))
print("0x%x:\t%s\t%s" % (i.address, i.mnemonic, i.op_str))
if len(i.regs_read) > 0:
print("\tImplicit registers read: "),
for r in i.regs_read:
print("%s " % i.reg_name(r)),
print
if len(i.groups) > 0:
print("\tThis instruction belongs to groups:", end="")
for g in i.groups:
print("%u" % g)
# print("%u" % g, end="")
print()
def dumpASM(flo, mode, maxAddr=1e99):
modeRef = {32: CS_MODE_32, 64: CS_MODE_64}
md = Cs(CS_ARCH_X86, modeRef[mode])
md.detail = True
for i in md.disasm(flo, 0):
# print(dir(i))
print("0x%x:\t%s\t%s" % (i.address, i.mnemonic, i.op_str))
print("\tImplicit registers read: ", end="")
for r in i.regs_read:
print("%s " % i.reg_name(r))
print()
print("\tImplicit registers written: ", end="")
for r in i.regs_write:
print("%s " % i.reg_name(r))
print()
if i.address > maxAddr:
break
| [
"meawoppl@gmail.com"
] | meawoppl@gmail.com |
e36693a654a9877c1004ae2e498dcd3df1c01fe5 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.0_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=61/sched.py | 0f9be34a7f8ee395e190f23b91fd7672f0ff05c2 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | -X FMLP -Q 0 -L 4 71 250
-X FMLP -Q 0 -L 4 67 200
-X FMLP -Q 1 -L 3 58 300
-X FMLP -Q 1 -L 3 51 300
-X FMLP -Q 2 -L 2 42 300
-X FMLP -Q 3 -L 2 35 200
34 125
32 400
28 150
15 175
9 125
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
82110d933bc0ba23a080ecb46d59338023c167d3 | 0f214f060611489bd8c17b44a227c8b5497727f9 | /eola/chapter8.py | 9bf99bdfc32e5c89e059973bff8f1d637fc01a36 | [] | no_license | adamchandra/manim | ee6ed53ef9e9295446a5424c1dcdc45a8bada061 | 382fc41d375804bb7280d37c40c1ce11ff6f3777 | refs/heads/master | 2021-01-17T20:31:39.677177 | 2016-08-29T01:26:08 | 2016-08-29T01:26:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,368 | py | from mobject.tex_mobject import TexMobject
from mobject import Mobject
from mobject.image_mobject import ImageMobject
from mobject.vectorized_mobject import VMobject
from animation.animation import Animation
from animation.transform import *
from animation.simple_animations import *
from topics.geometry import *
from topics.characters import *
from topics.functions import *
from topics.number_line import *
from topics.numerals import *
from scene import Scene
from camera import Camera
from mobject.svg_mobject import *
from mobject.tex_mobject import *
from mobject.vectorized_mobject import *
from eola.matrix import *
from eola.two_d_space import *
from eola.chapter5 import get_det_text, RightHandRule
# Chapter-wide colors: one per named vector (u, v, w) and one for
# product/result vectors (p), used consistently across all scenes below.
U_COLOR = ORANGE
V_COLOR = YELLOW
W_COLOR = MAROON_B
P_COLOR = RED
def get_vect_tex(*strings):
    """Wrap each symbol name in bold-arrow vector TeX.

    With a single argument, return one TeX string; with several,
    return a list of TeX strings in the same order.
    """
    template = "\\vec{\\textbf{%s}}"
    if len(strings) == 1:
        return template % strings[0]
    return [template % s for s in strings]
class OpeningQuote(Scene):
    """Display the chapter's opening quote, then fade in its attribution."""
    def construct(self):
        quote = TextMobject(
            "``And what is the use of a book,'' thought Alice,",
            "``without", "pictures", "or", "conversations", "?''"
        )
        # Color the two nouns that foreshadow the chapter's themes
        for tex, color in [("pictures", BLUE), ("conversations", MAROON_B)]:
            quote.highlight_by_tex(tex, color)
        quote.scale_to_fit_width(2*SPACE_WIDTH - 2)
        quote.to_edge(UP)
        attribution = TextMobject("-Lewis Carroll (Alice in Wonderland)")
        attribution.highlight(YELLOW)
        attribution.next_to(quote, DOWN, buff = 0.5)
        self.play(FadeIn(quote))
        self.dither(4)
        self.play(Write(attribution, run_time = 3))
        self.dither()
class LastVideo(Scene):
    """Title card recalling the previous video, with an empty video frame."""
    def construct(self):
        # 16:9 frame standing in for the referenced video
        screen = Rectangle(width = 16, height = 9, color = BLUE)
        screen.scale_to_fit_height(6)
        title = TextMobject("""
            Last video: Dot products and duality
        """)
        title.to_edge(UP)
        screen.next_to(title, DOWN)
        self.add(title)
        self.play(ShowCreation(screen))
        self.dither()
class DoTheSameForCross(TeacherStudentsScene):
    """Teacher proposes extending the dot-product duality story to cross products."""
    def construct(self):
        proposal = TextMobject("Let's do the same \\\\ for", "cross products")
        proposal.highlight_by_tex("cross products", YELLOW)
        self.teacher_says(proposal, pi_creature_target_mode = "surprised")
        self.random_blink(2)
        self.change_student_modes("pondering")
        self.random_blink()
class ListSteps(RandolphScene):
    """Outline the chapter's two-part structure while Randolph reacts."""
    CONFIG = {
        "randy_corner" : DOWN+RIGHT
    }
    def construct(self):
        header = TextMobject("Two part chapter")
        header.highlight(YELLOW)
        header.to_edge(UP)
        divider = Line(LEFT, RIGHT).scale(SPACE_WIDTH)
        divider.next_to(header, DOWN)
        # The two bullet points of the outline
        intro_step = TextMobject("This video: Standard introduction")
        deeper_step = TextMobject("Next video: Deeper understanding with ", "linear transformations")
        deeper_step.highlight_by_tex("linear transformations", BLUE)
        outline = Group(intro_step, deeper_step)
        outline.arrange_submobjects(DOWN, aligned_edge = LEFT, buff = LARGE_BUFF)
        outline.next_to(self.randy, UP)
        outline.to_edge(LEFT)
        self.add(header)
        self.play(ShowCreation(divider))
        self.play(
            Write(intro_step),
            ApplyFunction(
                lambda pi : pi.change_mode("happy").look(UP+LEFT),
                self.randy
            )
        )
        self.dither(1)
        self.play(
            Write(deeper_step),
            self.randy.change_mode, "pondering"
        )
        self.dither()
class ContrastDotAndCross(Scene):
    """Side-by-side T-chart contrasting dot products (defined in any
    dimension, scalar output) with cross products (3d only, vector output).

    Builds the chart headers, animates 2d and 5d dot-product expansions on
    the left, the full 3d cross-product determinant expansion on the right,
    then the 2d "cross product" formula, and finally braces emphasizing
    that one output is a number while the other is a vector.
    """
    def construct(self):
        self.add_t_chart()
        self.add_dot_products()
        self.add_cross_product()
        self.add_2d_cross_product()
        self.emphasize_output_type()
    def add_t_chart(self):
        """Draw the two column titles and the dividing lines of the chart."""
        for word, vect, color in ("Dot", LEFT, BLUE_C), ("Cross", RIGHT, YELLOW):
            title = TextMobject("%s product"%word)
            title.shift(vect*SPACE_WIDTH/2)
            title.to_edge(UP)
            title.highlight(color)
            self.add(title)
        v_line = Line(UP, DOWN).scale(SPACE_HEIGHT)
        l_h_line = Line(LEFT, ORIGIN).scale(SPACE_WIDTH)
        r_h_line = Line(ORIGIN, RIGHT).scale(SPACE_WIDTH)
        # `title` is intentionally the loop's last title (the "Cross" one);
        # both titles sit at the same height, so either would do.
        r_h_line.next_to(title, DOWN)
        l_h_line.next_to(r_h_line, LEFT, buff = 0)
        self.add(v_line, l_h_line, r_h_line)
        # Saved so later methods can position content below the header lines
        self.l_h_line, self.r_h_line = l_h_line, r_h_line
    def add_dot_products(self, max_width = SPACE_WIDTH-1, dims = [2, 5]):
        """Animate one random dot-product expansion per dimension in `dims`,
        stacked in the left column; entries fly into the expanded sum."""
        colors = [X_COLOR, Y_COLOR, Z_COLOR, MAROON_B, TEAL]
        last_mob = self.l_h_line
        dot_products = []
        for dim in dims:
            # Two random integer vectors of the given dimension
            arrays = [
                [random.randint(0, 9) for in_count in range(dim)]
                for out_count in range(2)
            ]
            m1, m2 = map(Matrix, arrays)
            for matrix in m1, m2:
                for entry, color in zip(matrix.get_entries(), colors):
                    entry.highlight(color)
                    entry.target = entry.copy()
            # "=" followed by dim-1 "+" signs; dots are appended by get_dot
            syms = Group(*map(TexMobject, ["="] + ["+"]*(dim-1)))
            def get_dot():
                dot = TexMobject("\\cdot")
                syms.add(dot)
                return dot
            # Interleave the +/= symbols with the pairwise products
            result = Group(*it.chain(*zip(
                syms,
                [
                    Group(
                        e1.target, get_dot(), e2.target
                    ).arrange_submobjects()
                    for e1, e2 in zip(m1.get_entries(), m2.get_entries())
                ]
            )))
            result.arrange_submobjects(RIGHT)
            dot_prod = Group(
                m1, TexMobject("\\cdot"), m2, result
            )
            dot_prod.arrange_submobjects(RIGHT)
            if dot_prod.get_width() > max_width:
                dot_prod.scale_to_fit_width(max_width)
            dot_prod.next_to(last_mob, DOWN, buff = MED_BUFF)
            last_mob = dot_prod
            dot_prod.to_edge(LEFT)
            # The expansion is removed so it can be animated in separately
            dot_prod.remove(result)
            dot_prod.syms = syms
            dot_prod.entries = list(m1.get_entries())+list(m2.get_entries())
            dot_products.append(dot_prod)
        self.add(*dot_products)
        for dot_prod in dot_products:
            self.play(
                Write(dot_prod.syms),
                *[
                    Transform(
                        e.copy(), e.target,
                        path_arc = -np.pi/6
                    )
                    for e in dot_prod.entries
                ],
                run_time = 2
            )
            self.dither()
    def add_cross_product(self):
        """Animate a random 3d cross product expanding into its vector of
        three "ad - bc" style components in the right column."""
        colors = [X_COLOR, Y_COLOR, Z_COLOR]
        arrays = [
            [random.randint(0, 9) for in_count in range(3)]
            for out_count in range(2)
        ]
        matrices = map(Matrix, arrays)
        for matrix in matrices:
            for entry, color in zip(matrix.get_entries(), colors):
                entry.highlight(color)
        m1, m2 = matrices
        cross_product = Group(m1, TexMobject("\\times"), m2)
        cross_product.arrange_submobjects()
        # Maps output component index -> its two product terms
        index_to_cross_enty = {}
        syms = Group()
        movement_sets = []
        # Each permutation (a, b, c) contributes the product m1[b]*m2[c]
        # to output component a; each component gets exactly two such terms.
        for a, b, c in it.permutations(range(3)):
            e1, e2 = m1.get_entries()[b], m2.get_entries()[c]
            for e in e1, e2:
                e.target = e.copy()
            movement_sets.append([e1, e1.target, e2, e2.target])
            dot = TexMobject("\\cdot")
            syms.add(dot)
            cross_entry = Group(e1.target, dot, e2.target)
            cross_entry.arrange_submobjects()
            if a not in index_to_cross_enty:
                index_to_cross_enty[a] = []
            index_to_cross_enty[a].append(cross_entry)
        result_entries = []
        for a in range(3):
            prod1, prod2 = index_to_cross_enty[a]
            # Middle component has its sign flipped in the standard formula
            if a == 1:
                prod1, prod2 = prod2, prod1
            prod2.arrange_submobjects(LEFT)
            minus = TexMobject("-")
            syms.add(minus)
            entry = Group(prod1, minus, prod2)
            entry.arrange_submobjects(RIGHT)
            result_entries.append(entry)
        result = Matrix(result_entries)
        full_cross_product = Group(
            cross_product, TexMobject("="), result
        )
        full_cross_product.arrange_submobjects()
        full_cross_product.scale(0.75)
        full_cross_product.next_to(self.r_h_line, DOWN, buff = MED_BUFF/2)
        # The result column is animated in afterwards via `movements`
        full_cross_product.remove(result)
        self.play(
            Write(full_cross_product),
        )
        movements = []
        for e1, e1_target, e2, e2_target in movement_sets:
            movements += [
                e1.copy().move_to, e1_target,
                e2.copy().move_to, e2_target,
            ]
        brace = Brace(cross_product)
        brace_text = brace.get_text("Only 3d")
        self.play(
            GrowFromCenter(brace),
            Write(brace_text)
        )
        self.play(
            Write(result.get_brackets()),
            Write(syms),
            *movements,
            run_time = 2
        )
        self.dither()
        self.cross_result = result
        self.only_3d_text = brace_text
    def add_2d_cross_product(self):
        """Below a dashed divider, animate the 2d analogue:
        v x w = v1*w2 - w1*v2 (a scalar)."""
        h_line = DashedLine(ORIGIN, SPACE_WIDTH*RIGHT)
        h_line.next_to(self.only_3d_text, DOWN, buff = MED_BUFF/2)
        h_line.to_edge(RIGHT, buff = 0)
        arrays = np.random.randint(0, 9, (2, 2))
        m1, m2 = matrices = map(Matrix, arrays)
        for m in matrices:
            for e, color in zip(m.get_entries(), [X_COLOR, Y_COLOR]):
                e.highlight(color)
        cross_product = Group(m1, TexMobject("\\times"), m2)
        cross_product.arrange_submobjects()
        (x1, x2), (x3, x4) = tuple(m1.get_entries()), tuple(m2.get_entries())
        entries = [x1, x2, x3, x4]
        for entry in entries:
            entry.target = entry.copy()
        eq, dot1, minus, dot2 = syms = map(TexMobject,
            ["=", "\\cdot", "-", "\\cdot"]
        )
        # Layout of the scalar result: = x1*x4 - x3*x2
        result = Group(
            eq, x1.target, dot1, x4.target,
            minus, x3.target, dot2, x2.target,
        )
        result.arrange_submobjects(RIGHT)
        full_cross_product = Group(cross_product, result)
        full_cross_product.arrange_submobjects(RIGHT)
        full_cross_product.next_to(h_line, DOWN, buff = MED_BUFF/2)
        self.play(ShowCreation(h_line))
        self.play(Write(cross_product))
        self.play(
            Write(Group(*syms)),
            *[
                Transform(entry.copy(), entry.target)
                for entry in entries
            ]
        )
        self.dither()
        # Everything after the "=" sign, braced in emphasize_output_type
        self.two_d_result = Group(*result[1:])
    def emphasize_output_type(self):
        """Brace the 2d result as a "Number" and the 3d result as a "Vector"."""
        three_d_brace = Brace(self.cross_result)
        two_d_brace = Brace(self.two_d_result)
        vector = three_d_brace.get_text("Vector")
        number = two_d_brace.get_text("Number")
        self.play(
            GrowFromCenter(two_d_brace),
            Write(number)
        )
        self.dither()
        self.play(
            GrowFromCenter(three_d_brace),
            Write(vector)
        )
        self.dither()
class PrereqDeterminant(Scene):
    """Title card directing viewers to the determinant video as a prerequisite."""
    def construct(self):
        # 16:9 frame standing in for the referenced video
        screen = Rectangle(width = 16, height = 9, color = BLUE)
        screen.scale_to_fit_height(6)
        heading = TextMobject("""
            Prerequisite: Understanding determinants
        """)
        heading.scale_to_fit_width(2*SPACE_WIDTH - 2)
        heading.to_edge(UP)
        screen.next_to(heading, DOWN)
        self.add(heading)
        self.play(ShowCreation(screen))
        self.dither()
class Define2dCrossProduct(LinearTransformationScene):
    """Define the 2d cross product v x w as the determinant of the matrix
    whose columns are v and w, then interpret it geometrically.

    Sequence: write the determinant definition, reinterpret the matrix as a
    linear transformation sending i-hat/j-hat to v/w, transform the unit
    square into the v-w parallelogram (area = |det|), and demonstrate the
    sign/orientation rule and anticommutativity v x w = -(w x v).
    """
    CONFIG = {
        "show_basis_vectors" : False,
        "v_coords" : [3, 1],
        "w_coords" : [2, -1],
    }
    def construct(self):
        self.initial_definition()
        self.show_transformation()
        self.transform_square()
        self.show_orientation_rule()
    def initial_definition(self):
        """Draw v and w with labels/coordinates and build the equation
        v x w = det([v w]) in the top-left corner."""
        self.plane.save_state()
        self.plane.fade()
        v = self.add_vector(self.v_coords, color = V_COLOR)
        w = self.add_vector(self.w_coords, color = W_COLOR)
        # Keep v and w fixed when the plane is later transformed
        self.moving_vectors.remove(v)
        self.moving_vectors.remove(w)
        for vect, name, direction in (v, "v", "left"), (w, "w", "right"):
            color = vect.get_color()
            vect.label = self.label_vector(
                vect, name, color = color, direction = direction,
            )
            vect.coord_array = vector_coordinate_label(
                vect, color = color,
            )
            vect.coords = vect.coord_array.get_entries()
        for vect, edge in (v, DOWN), (w, UP):
            vect.coord_array.move_to(
                vect.coord_array.get_center(),
                aligned_edge = edge
            )
            self.play(Write(vect.coord_array, run_time = 1))
        # Copies of labels/coords will fly into the corner equation
        movers = [v.label, w.label, v.coords, w.coords]
        for mover in movers:
            mover.target = mover.copy()
        times = TexMobject("\\times")
        cross_product = Group(
            v.label.target, times, w.label.target
        )
        cross_product.arrange_submobjects()
        # Matrix with v and w as its columns (hence the transpose)
        matrix = Matrix(np.array([
            list(v.coords.target),
            list(w.coords.target)
        ]).T)
        det_text = get_det_text(matrix)
        full_det = Group(det_text, matrix)
        equals = TexMobject("=")
        equation = Group(cross_product, equals, full_det)
        equation.arrange_submobjects()
        equation.to_corner(UP+LEFT)
        matrix_background = BackgroundRectangle(matrix)
        cross_background = BackgroundRectangle(cross_product)
        disclaimer = TextMobject("$^*$ See ``Note on conventions'' in description")
        disclaimer.scale(0.7)
        disclaimer.highlight(RED)
        disclaimer.next_to(
            det_text.get_corner(UP+RIGHT), RIGHT, buff = 0
        )
        disclaimer.add_background_rectangle()
        self.play(
            FadeIn(cross_background),
            Transform(v.label.copy(), v.label.target),
            Transform(w.label.copy(), w.label.target),
            Write(times),
        )
        self.dither()
        self.play(
            ShowCreation(matrix_background),
            Write(matrix.get_brackets()),
            run_time = 1
        )
        self.play(Transform(v.coords.copy(), v.coords.target))
        self.play(Transform(w.coords.copy(), w.coords.target))
        matrix.add_to_back(matrix_background)
        self.dither()
        self.play(
            Write(equals),
            Write(det_text),
            Animation(matrix),
        )
        self.dither()
        self.play(FadeIn(disclaimer))
        self.dither()
        self.play(FadeOut(disclaimer))
        self.dither()
        cross_product.add_to_back(cross_background)
        cross_product.add(equals)
        # Stash pieces for the later methods
        self.cross_product = cross_product
        self.matrix = matrix
        self.det_text = det_text
        self.v, self.w = v, w
    def show_transformation(self):
        """Reinterpret the matrix columns as where i-hat and j-hat land,
        then apply that transformation to the plane."""
        matrix = self.matrix.copy()
        everything = self.get_mobjects()
        everything.remove(self.plane)
        everything.remove(self.background_plane)
        # Clear everything but the planes; keep the matrix on screen
        self.play(
            *map(FadeOut, everything) + [
            Animation(self.background_plane),
            self.plane.restore,
            Animation(matrix),
        ])
        i_hat, j_hat = self.get_basis_vectors()
        for vect in i_hat, j_hat:
            vect.save_state()
        basis_labels = self.get_basis_vector_labels()
        self.play(
            ShowCreation(i_hat),
            ShowCreation(j_hat),
            Write(basis_labels)
        )
        self.dither()
        side_brace = Brace(matrix, RIGHT)
        transform_words = side_brace.get_text("Linear transformation")
        transform_words.add_background_rectangle()
        col1, col2 = [
            Group(*matrix.get_mob_matrix()[i,:])
            for i in 0, 1
        ]
        both_words = []
        for char, color, col in ("i", X_COLOR, col1), ("j", Y_COLOR, col2):
            words = TextMobject("Where $\\hat\\%smath$ lands"%char)
            words.highlight(color)
            words.add_background_rectangle()
            words.next_to(col, DOWN, buff = LARGE_BUFF)
            words.arrow = Arrow(words.get_top(), col.get_bottom(), color = color)
            both_words.append(words)
        i_words, j_words = both_words
        self.play(
            GrowFromCenter(side_brace),
            Write(transform_words)
        )
        self.play(
            Write(i_words),
            ShowCreation(i_words.arrow),
            col1.highlight, X_COLOR
        )
        self.dither()
        self.play(
            Transform(i_words, j_words),
            Transform(i_words.arrow, j_words.arrow),
            col2.highlight, Y_COLOR
        )
        self.dither()
        self.play(*map(FadeOut, [i_words, i_words.arrow, basis_labels]))
        # Register the basis vectors so they follow the transformation
        self.add_vector(i_hat, animate = False)
        self.add_vector(j_hat, animate = False)
        self.play(*map(FadeOut, [side_brace, transform_words]))
        self.add_foreground_mobject(matrix)
        self.apply_transposed_matrix([self.v_coords, self.w_coords])
        self.dither()
        self.play(
            FadeOut(self.plane),
            *map(Animation, [
                self.background_plane,
                matrix,
                i_hat,
                j_hat,
            ])
        )
        self.play(
            ShowCreation(self.v),
            ShowCreation(self.w),
            FadeIn(self.v.label),
            FadeIn(self.w.label),
            FadeIn(self.v.coord_array),
            FadeIn(self.w.coord_array),
            matrix.highlight_columns, V_COLOR, W_COLOR
        )
        self.dither()
        self.i_hat, self.j_hat = i_hat, j_hat
        self.matrix = matrix
    def transform_square(self):
        """Transform the unit square into the v-w parallelogram and label
        the determinant as its (signed) area."""
        self.play(Write(self.det_text))
        self.matrix.add(self.det_text)
        vect_stuffs = Group(*it.chain(*[
            [m, m.label, m.coord_array]
            for m in self.v, self.w
        ]))
        to_restore = [self.plane, self.i_hat, self.j_hat]
        for mob in to_restore:
            mob.fade(1)
        self.play(*map(FadeOut, vect_stuffs))
        self.play(
            *[m.restore for m in to_restore] + [
                Animation(self.matrix)
            ]
        )
        self.add_unit_square(animate = True, opacity = 0.2)
        self.square.save_state()
        self.dither()
        self.apply_transposed_matrix(
            [self.v_coords, self.w_coords]
        )
        self.dither()
        self.play(
            FadeOut(self.plane),
            Animation(self.matrix),
            *map(FadeIn, vect_stuffs)
        )
        self.play(Write(self.cross_product))
        det_text_brace = Brace(self.det_text)
        area_words = det_text_brace.get_text("Area of this parallelogram")
        area_words.add_background_rectangle()
        area_arrow = Arrow(
            area_words.get_bottom(),
            self.square.get_center(),
            color = WHITE
        )
        self.play(
            GrowFromCenter(det_text_brace),
            Write(area_words),
            ShowCreation(area_arrow)
        )
        self.dither()
        # "+/-" sign pair hinting that the area is signed
        pm = Group(*map(TexMobject, ["+", "-"]))
        pm.gradient_highlight(GREEN, RED)
        pm.arrange_submobjects(DOWN, buff = SMALL_BUFF)
        pm.add_to_back(BackgroundRectangle(pm))
        pm.next_to(area_words[0], LEFT, aligned_edge = DOWN)
        self.play(
            Transform(self.square.get_point_mobject(), pm),
            path_arc = -np.pi/2
        )
        self.dither()
        self.play(*map(FadeOut, [
            area_arrow, self.v.coord_array, self.w.coord_array
        ]))
    def show_orientation_rule(self):
        """Rotate v and w to show the sign convention (positive when v is
        clockwise of w here), then state anticommutativity."""
        self.remove(self.i_hat, self.j_hat)
        for vect in self.v, self.w:
            vect.add(vect.label)
            vect.target = vect.copy()
        angle = np.pi/3
        # Counter-rotate the labels so the text stays upright
        self.v.target.rotate(-angle)
        self.w.target.rotate(angle)
        self.v.target.label.rotate_in_place(angle)
        self.w.target.label.rotate_in_place(-angle)
        for vect in self.v, self.w:
            vect.target.label[0].set_fill(opacity = 0)
        self.square.target = self.square.copy().restore()
        transform = self.get_matrix_transformation([
            self.v.target.get_end()[:2],
            self.w.target.get_end()[:2],
        ])
        self.square.target.apply_function(transform)
        movers = Group(self.square, self.v, self.w)
        movers.target = Group(*[m.target for m in movers])
        movers.save_state()
        self.remove(self.square)
        self.play(Transform(movers, movers.target))
        self.dither()
        v_tex, w_tex = ["\\vec{\\textbf{%s}}"%s for s in "v", "w"]
        positive_words, negative_words = words_list = [
            TexMobject(v_tex, "\\times", w_tex, "\\text{ is }", word)
            for word in "\\text{positive}", "\\text{negative}"
        ]
        for words in words_list:
            words.highlight_by_tex(v_tex, V_COLOR)
            words.highlight_by_tex(w_tex, W_COLOR)
            words.highlight_by_tex("\\text{positive}", GREEN)
            words.highlight_by_tex("\\text{negative}", RED)
            words.add_background_rectangle()
            words.next_to(self.square, UP)
        arc = self.get_arc(self.v, self.w)
        arc.highlight(GREEN)
        self.play(
            Write(positive_words),
            ShowCreation(arc)
        )
        self.dither()
        self.remove(arc)
        self.play(movers.restore)
        arc = self.get_arc(self.v, self.w)
        arc.highlight(RED)
        self.play(
            Transform(positive_words, negative_words),
            ShowCreation(arc)
        )
        self.dither()
        anticommute = TexMobject(
            v_tex, "\\times", w_tex, "=-", w_tex, "\\times", v_tex
        )
        anticommute.shift(SPACE_WIDTH*RIGHT/2)
        anticommute.to_edge(UP)
        anticommute.highlight_by_tex(v_tex, V_COLOR)
        anticommute.highlight_by_tex(w_tex, W_COLOR)
        anticommute.add_background_rectangle()
        # Build targets that swap v and w (each takes the other's place)
        for v1, v2 in (self.v, self.w), (self.w, self.v):
            v1.label[0].set_fill(opacity = 0)
            v1.target = v1.copy()
            v1.target.label.rotate_in_place(v1.get_angle()-v2.get_angle())
            v1.target.label.scale_in_place(v1.get_length()/v2.get_length())
            v1.target.rotate(v2.get_angle()-v1.get_angle())
            v1.target.scale(v2.get_length()/v1.get_length())
            v1.target.label.move_to(v2.label)
        self.play(
            FadeOut(arc),
            Transform(positive_words, anticommute)
        )
        # Swap and swap back (there_and_back) to visualize anticommutativity
        self.play(
            Transform(self.v, self.v.target),
            Transform(self.w, self.w.target),
            rate_func = there_and_back,
            run_time = 2,
        )
        self.dither()
    def get_arc(self, v, w, radius = 2):
        """Return a tipped arc sweeping from w's angle to v's angle,
        slightly inset (by `nudge`) so it doesn't touch the vectors."""
        v_angle, w_angle = v.get_angle(), w.get_angle()
        nudge = 0.05
        arc = Arc(
            (1-2*nudge)*(v_angle - w_angle),
            start_angle = interpolate(w_angle, v_angle, nudge),
            radius = radius,
            stroke_width = 8,
        )
        arc.add_tip()
        return arc
class TwoDCrossProductExample(Define2dCrossProduct):
CONFIG = {
"v_coords" : [-3, 1],
"w_coords" : [2, 1],
}
def construct(self):
self.plane.fade()
v = Vector(self.v_coords, color = V_COLOR)
w = Vector(self.w_coords, color = W_COLOR)
v.coords = Matrix(self.v_coords)
w.coords = Matrix(self.w_coords)
v.coords.next_to(v.get_end(), LEFT)
w.coords.next_to(w.get_end(), RIGHT)
v.coords.highlight(v.get_color())
w.coords.highlight(w.get_color())
for coords in v.coords, w.coords:
coords.background_rectangle = BackgroundRectangle(coords)
coords.add_to_back(coords.background_rectangle)
v.label = self.get_vector_label(v, "v", "left", color = v.get_color())
w.label = self.get_vector_label(w, "w", "right", color = w.get_color())
matrix = Matrix(np.array([
list(v.coords.copy().get_entries()),
list(w.coords.copy().get_entries()),
]).T)
matrix_background = BackgroundRectangle(matrix)
col1, col2 = it.starmap(Group, matrix.get_mob_matrix().T)
det_text = get_det_text(matrix)
v_tex, w_tex = get_vect_tex("v", "w")
cross_product = TexMobject(v_tex, "\\times", w_tex, "=")
cross_product.highlight_by_tex(v_tex, V_COLOR)
cross_product.highlight_by_tex(w_tex, W_COLOR)
cross_product.add_background_rectangle()
equation_start = Group(
cross_product,
Group(matrix_background, det_text, matrix)
)
equation_start.arrange_submobjects()
equation_start.next_to(ORIGIN, DOWN).to_edge(LEFT)
for vect in v, w:
self.play(
ShowCreation(vect),
Write(vect.coords),
Write(vect.label)
)
self.dither()
self.play(
Transform(v.coords.background_rectangle, matrix_background),
Transform(w.coords.background_rectangle, matrix_background),
Transform(v.coords.get_entries(), col1),
Transform(w.coords.get_entries(), col2),
Transform(v.coords.get_brackets(), matrix.get_brackets()),
Transform(w.coords.get_brackets(), matrix.get_brackets()),
)
self.play(*map(Write, [det_text, cross_product]))
v1, v2 = v.coords.get_entries()
w1, w2 = w.coords.get_entries()
entries = v1, v2, w1, w2
for entry in entries:
entry.target = entry.copy()
det = np.linalg.det([self.v_coords, self.w_coords])
equals, dot1, minus, dot2, equals_result = syms = Group(*map(
TexMobject,
["=", "\\cdot", "-", "\\cdot", "=%d"%det]
))
equation_end = Group(
equals, v1.target, dot1, w2.target,
minus, w1.target, dot2, v2.target, equals_result
)
equation_end.arrange_submobjects()
equation_end.next_to(equation_start)
syms_rect = BackgroundRectangle(syms)
syms.add_to_back(syms_rect)
equation_end.add_to_back(syms_rect)
syms.remove(equals_result)
self.play(
Write(syms),
Transform(
Group(v1, w2).copy(), Group(v1.target, w2.target),
rate_func = squish_rate_func(smooth, 0, 1./3),
path_arc = np.pi/2
),
Transform(
Group(v2, w1).copy(), Group(v2.target, w1.target),
rate_func = squish_rate_func(smooth, 2./3, 1),
path_arc = np.pi/2
),
run_time = 3
)
self.dither()
self.play(Write(equals_result))
self.add_foreground_mobject(equation_start, equation_end)
self.show_transformation(v, w)
det_sym = TexMobject(str(int(abs(det))))
det_sym.scale(1.5)
det_sym.next_to(v.get_end()+w.get_end(), DOWN+RIGHT, buff = MED_BUFF/2)
arc = self.get_arc(v, w, radius = 1)
arc.highlight(RED)
self.play(Write(det_sym))
self.play(ShowCreation(arc))
self.dither()
def show_transformation(self, v, w):
    """Animate the linear map whose (transposed) matrix columns are v and w.

    Draws the basis vectors, fades in the unit square, then applies the
    transformation while morphing i-hat onto v and j-hat onto w, so the
    unit square lands on the parallelogram spanned by v and w.
    """
    i_hat, j_hat = self.get_basis_vectors()
    self.play(*map(ShowCreation, [i_hat, j_hat]))
    self.add_unit_square(animate = True, opacity = 0.2)
    # Matrix rows are the 2D endpoints of v and w (transposed convention
    # used throughout this scene class).
    self.apply_transposed_matrix(
        [v.get_end()[:2], w.get_end()[:2]],
        added_anims = [
            Transform(i_hat, v),
            Transform(j_hat, w)
        ]
    )
class PlayAround(TeacherStudentsScene):
    """Teacher encourages the students to experiment with the idea,
    then the scene zooms into one student's (empty) thought bubble."""
    def construct(self):
        self.teacher_says(""" \\centering
            Play with the idea if
            you wish to understand it
        """)
        self.change_student_modes("pondering", "happy", "happy")
        self.random_blink(2)
        # Empty thought bubble on the first student, used as a zoom target.
        self.student_thinks("", student_index = 0)
        self.zoom_in_on_thought_bubble()
class BiggerWhenPerpendicular(LinearTransformationScene):
    """Illustrates that the cross product's magnitude is larger when v and w
    are more perpendicular: two vectors rotate toward each other while the
    spanned parallelogram shrinks, and the caption flips from
    "More perpendicular => ... bigger" to "Similar direction => ... smaller".
    """
    CONFIG = {
        "show_basis_vectors" : False,
    }
    def construct(self):
        self.lock_in_faded_grid()
        # Build the unit square only to copy it below; it is not shown as-is.
        self.add_unit_square(animate = False)
        square = self.square
        self.remove(square)
        start_words = TextMobject("More perpendicular")
        end_words = TextMobject("Similar direction")
        arrow = TextMobject("\\Rightarrow")
        v_tex, w_tex = get_vect_tex("v", "w")
        cross_is = TexMobject(v_tex, "\\times", w_tex, "\\text{ is }")
        cross_is.highlight_by_tex(v_tex, V_COLOR)
        cross_is.highlight_by_tex(w_tex, W_COLOR)
        bigger = TextMobject("bigger")
        smaller = TextMobject("smaller")
        bigger.scale(1.5)
        smaller.scale(0.75)
        bigger.highlight(PINK)
        smaller.highlight(TEAL)
        group = Group(start_words, arrow, cross_is, bigger)
        group.arrange_submobjects()
        group.to_edge(UP)
        # "Similar direction" / "smaller" are positioned now so they can
        # simply be Transform targets later.
        end_words.move_to(start_words, aligned_edge = RIGHT)
        smaller.next_to(cross_is, buff = MED_BUFF/2, aligned_edge = DOWN)
        for mob in list(group) + [end_words, smaller]:
            mob.add_background_rectangle()
        v = Vector([2, 2], color = V_COLOR)
        w = Vector([2, -2], color = W_COLOR)
        # Targets: rotate v and w toward each other (less perpendicular).
        v.target = v.copy().rotate(-np.pi/5)
        w.target = w.copy().rotate(np.pi/5)
        transforms = [
            self.get_matrix_transformation([v1.get_end()[:2], v2.get_end()[:2]])
            for v1, v2 in (v, w), (v.target, w.target)
        ]
        # Parallelograms spanned before and after the rotation.
        start_square, end_square = [
            square.copy().apply_function(transform)
            for transform in transforms
        ]
        for vect in v, w:
            self.play(ShowCreation(vect))
        group.remove(bigger)
        self.play(
            FadeIn(group),
            ShowCreation(start_square),
            *map(Animation, [v, w])
        )
        self.play(GrowFromCenter(bigger))
        self.dither()
        self.play(
            Transform(start_square, end_square),
            Transform(v, v.target),
            Transform(w, w.target),
        )
        self.play(
            Transform(start_words, end_words),
            Transform(bigger, smaller)
        )
        self.dither()
class ScalingRule(LinearTransformationScene):
    """Shows the scaling rule (3v) x w = 3(v x w): v is stretched to 3v and
    the spanned parallelogram is tiled three times, matching the written
    right-hand side."""
    CONFIG = {
        "v_coords" : [2, -1],
        "w_coords" : [1, 1],
        "show_basis_vectors" : False
    }
    def construct(self):
        self.lock_in_faded_grid()
        self.add_unit_square(animate = False)
        self.remove(self.square)
        square = self.square
        v = Vector(self.v_coords, color = V_COLOR)
        w = Vector(self.w_coords, color = W_COLOR)
        v.label = self.get_vector_label(v, "v", "right", color = V_COLOR)
        w.label = self.get_vector_label(w, "w", "left", color = W_COLOR)
        new_v = v.copy().scale(3)
        new_v.label = self.get_vector_label(
            new_v, "3\\vec{\\textbf{v}}", "right", color = V_COLOR
        )
        # Attach labels so they move with their vectors during Transforms.
        for vect in v, w, new_v:
            vect.add(vect.label)
        transform = self.get_matrix_transformation(
            [self.v_coords, self.w_coords]
        )
        # Turn the unit square into the v,w parallelogram, then tile it
        # three times along v to depict the tripled area.
        square.apply_function(transform)
        new_squares = Group(*[
            square.copy().shift(m*v.get_end())
            for m in range(3)
        ])
        v_tex, w_tex = get_vect_tex("v", "w")
        cross_product = TexMobject(v_tex, "\\times", w_tex)
        rhs = TexMobject("=3(", v_tex, "\\times", w_tex, ")")
        three_v = TexMobject("(3", v_tex, ")")
        for tex_mob in cross_product, rhs, three_v:
            tex_mob.highlight_by_tex(v_tex, V_COLOR)
            tex_mob.highlight_by_tex(w_tex, W_COLOR)
        equation = Group(cross_product, rhs)
        equation.arrange_submobjects()
        equation.to_edge(UP)
        v_tex_mob = cross_product[0]
        three_v.move_to(v_tex_mob, aligned_edge = RIGHT)
        for tex_mob in cross_product, rhs:
            tex_mob.add_background_rectangle()
        self.add(cross_product)
        self.play(ShowCreation(v))
        self.play(ShowCreation(w))
        self.play(
            ShowCreation(square),
            *map(Animation, [v, w])
        )
        self.dither()
        # Stretch v to 3v while the "v" in the formula becomes "(3v)".
        self.play(
            Transform(v, new_v),
            Transform(v_tex_mob, three_v),
        )
        self.dither()
        self.play(
            Transform(square, new_squares),
            *map(Animation, [v, w]),
            path_arc = -np.pi/6
        )
        self.dither()
        self.play(Write(rhs))
        self.dither()
class TechnicallyNotTheDotProduct(TeacherStudentsScene):
    """Teacher admits the 2D construction shown was technically not the
    cross product; students react progressively worse.

    NOTE(review): the class name says "DotProduct" while the dialogue says
    "cross product" — presumably a leftover name; confirm before renaming.
    """
    def construct(self):
        self.teacher_says("""
            That was technically
            not the cross product
        """)
        # Student reactions escalate one at a time.
        self.change_student_modes("confused")
        self.change_student_modes("confused", "angry")
        self.change_student_modes("confused", "angry", "sassy")
        self.random_blink(3)
class ThreeDShowParallelogramAndCrossProductVector(Scene):
    """Placeholder: the 3D footage for this scene is produced elsewhere."""
    pass
class WriteAreaOfParallelogram(Scene):
    """Writes "Area of parallelogram = 2.5", revealing the number last."""
    def construct(self):
        words = TextMobject(
            "Area of ", "parallelogram", " $=$ ", "$2.5$",
            arg_separator = ""
        )
        words.highlight_by_tex("parallelogram", BLUE)
        words.highlight_by_tex("$2.5$", BLUE)
        # Pull the numeric result out so it can be written separately.
        result = words[-1]
        words.remove(result)
        self.play(Write(words))
        self.dither()
        self.play(Write(result, run_time = 1))
        self.dither()
class WriteCrossProductProperties(Scene):
    """Writes v x w = p and annotates p: it is a vector, with length 2.5,
    perpendicular to the parallelogram."""
    def construct(self):
        v_tex, w_tex, p_tex = get_vect_tex(*"vwp")
        cross_product = TexMobject(v_tex, "\\times", w_tex, "=", p_tex)
        cross_product.highlight_by_tex(v_tex, V_COLOR)
        cross_product.highlight_by_tex(w_tex, W_COLOR)
        cross_product.highlight_by_tex(p_tex, P_COLOR)
        cross_product.to_edge(UP, buff = LARGE_BUFF)
        # Brace sits under the result vector p (last submobject).
        p_mob = cross_product[-1]
        brace = Brace(p_mob)
        brace.do_in_place(brace.stretch, 2, 0)
        vector = brace.get_text("vector")
        vector.highlight(P_COLOR)
        length_words = TextMobject("With length", "2.5")
        length_words.highlight_by_tex("2.5", BLUE)
        length_words.next_to(vector, DOWN, buff = MED_BUFF)
        perpendicular = TextMobject("""
            Perpendicular to
            the""", "parallelogram"
        )
        perpendicular.highlight_by_tex("parallelogram", BLUE)
        perpendicular.next_to(length_words, DOWN, buff = MED_BUFF)
        self.play(Write(cross_product))
        self.play(
            GrowFromCenter(brace),
            Write(vector, run_time = 1)
        )
        self.dither()
        self.play(Write(length_words, run_time = 1))
        self.dither()
        self.play(Write(perpendicular))
        self.dither()
def get_cross_product_right_hand_rule_labels():
    """Return the three TeX labels for the right-hand-rule figure:
    v, w, and the cross product v x w."""
    v_tex, w_tex = get_vect_tex(*"vw")
    cross_tex = "%s \\times %s"%(v_tex, w_tex)
    return [v_tex, w_tex, cross_tex]
class CrossProductRightHandRule(RightHandRule):
    """Right-hand-rule figure labeled with v, w, and v x w.

    NOTE(review): labels are for v/w but the first color is U_COLOR —
    presumably an intentional palette reuse; confirm against RightHandRule.
    """
    CONFIG = {
        "flip" : False,
        "labels_tex" : get_cross_product_right_hand_rule_labels(),
        "colors" : [U_COLOR, W_COLOR, P_COLOR],
    }
class LabelingExampleVectors(Scene):
    """Fades in and out, one at a time: v = [0,0,2], w = [0,2,0],
    "Area = 4", and v x w = [-4,0,0]."""
    def construct(self):
        # NOTE: `texs` is assigned but never used afterwards.
        v_tex, w_tex = texs = get_vect_tex(*"vw")
        colors = [U_COLOR, W_COLOR, P_COLOR]
        equations = [
            TexMobject(v_tex, "=%s"%matrix_to_tex_string([0, 0, 2])),
            TexMobject(w_tex, "=%s"%matrix_to_tex_string([0, 2, 0])),
            TexMobject(
                v_tex, "\\times", w_tex,
                "=%s"%matrix_to_tex_string([-4, 0, 0])
            ),
        ]
        for eq, color in zip(equations, colors):
            eq.highlight(color)
            eq.scale(2)
        area_words = TextMobject("Area", "=4")
        area_words[0].highlight(BLUE)
        area_words.scale(2)
        # Show area before revealing the cross product result.
        for mob in equations[:2] + [area_words, equations[2]]:
            self.fade_in_out(mob)
    def fade_in_out(self, mob):
        """Fade mob in, pause, fade it back out."""
        self.play(FadeIn(mob))
        self.dither()
        self.play(FadeOut(mob))
class ThreeDTwoPossiblePerpendicularVectors(Scene):
    """Placeholder: the 3D footage for this scene is produced elsewhere."""
    pass
class ThreeDCrossProductExample(Scene):
    """Placeholder: the 3D footage for this scene is produced elsewhere."""
    pass
class ShowCrossProductFormula(Scene):
    """Builds the component formula for v x w by flying entries of the two
    column vectors into each slot of the result vector, with signs taken
    from the parity of each index permutation."""
    def construct(self):
        colors = [X_COLOR, Y_COLOR, Z_COLOR]
        arrays = [
            ["%s_%d"%(s, i) for i in range(1, 4)]
            for s in "v", "w"
        ]
        matrices = map(Matrix, arrays)
        for matrix in matrices:
            for entry, color in zip(matrix.get_entries(), colors):
                entry.highlight(color)
        m1, m2 = matrices
        cross_product = Group(m1, TexMobject("\\times"), m2)
        cross_product.arrange_submobjects()
        cross_product.shift(2*LEFT)
        def get_perm_sign(a, b, c):
            # Sign of the permutation (a, b, c): determinant of the
            # correspondingly row-permuted identity matrix (+1 or -1).
            identity = np.identity(3)
            return np.linalg.det(identity[[a, b, c]])
        entry_dicts = [{} for x in range(3)]
        movement_sets = []
        # For each permutation, entry a of the result gets the product
        # v_b * w_c, negated when the permutation is odd.
        for a, b, c in it.permutations(range(3)):
            sign = get_perm_sign(a, b, c)
            e1, e2 = m1.get_entries()[b], m2.get_entries()[c]
            for e in e1, e2:
                e.target = e.copy()
            dot = TexMobject("\\cdot")
            syms = Group(dot)
            if sign < 0:
                minus = TexMobject("-")
                syms.add(minus)
                cross_entry = Group(minus, e2.target, dot, e1.target)
                cross_entry.arrange_submobjects()
                entry_dicts[a]["negative"] = cross_entry
            else:
                cross_entry = Group(e1.target, dot, e2.target)
                cross_entry.arrange_submobjects()
                entry_dicts[a]["positive"] = cross_entry
            cross_entry.arrange_submobjects()
            movement_sets.append([
                e1, e1.target,
                e2, e2.target,
                syms
            ])
        # Each result slot shows "positive term  negative term".
        result = Matrix([
            Group(
                entry_dict["positive"],
                entry_dict["negative"],
            ).arrange_submobjects()
            for entry_dict in entry_dicts
        ])
        equals = TexMobject("=").next_to(cross_product)
        result.next_to(equals)
        self.play(FadeIn(cross_product))
        self.play(
            Write(equals),
            Write(result.get_brackets())
        )
        self.dither()
        # Swap two movement sets so terms appear in presentation order.
        movement_sets[2], movement_sets[3] = movement_sets[3], movement_sets[2]
        for e1, e1_target, e2, e2_target, syms in movement_sets:
            e1.save_state()
            e2.save_state()
            self.play(
                e1.scale_in_place, 1.5,
                e2.scale_in_place, 1.5,
            )
            self.play(
                Transform(e1.copy(), e1_target),
                Transform(e2.copy(), e2_target),
                Write(syms),
                e1.restore,
                e2.restore,
                path_arc = -np.pi/2
            )
        self.dither()
class ThisGetsWeird(TeacherStudentsScene):
    """Brief interlude: teacher warns "This gets weird..."."""
    def construct(self):
        self.teacher_says(
            "This gets weird...",
            pi_creature_target_mode = "sassy"
        )
        self.random_blink(2)
class DeterminantTrick(Scene):
    """Presents the determinant mnemonic for v x w: a 3x3 "matrix" whose
    first column holds the basis vectors i-hat, j-hat, k-hat and whose other
    columns are v and w, then expands it along the first column into
    i(v2 w3 - v3 w2) + j(v3 w1 - v1 w3) + k(v1 w2 - v2 w1)."""
    def construct(self):
        v_terms, w_terms = [
            ["%s_%d"%(s, d) for d in range(1, 4)]
            for s in "v", "w"
        ]
        v = Matrix(v_terms)
        w = Matrix(w_terms)
        v.highlight(V_COLOR)
        w.highlight(W_COLOR)
        # Column 1: unit basis symbols; columns 2-3: copies of v and w.
        matrix = Matrix(np.array([
            [
                TexMobject("\\hat{%s}"%s)
                for s in "\\imath", "\\jmath", "k"
            ],
            list(v.get_entries().copy()),
            list(w.get_entries().copy()),
        ]).T)
        colors = [X_COLOR, Y_COLOR, Z_COLOR]
        col1, col2, col3 = it.starmap(Group, matrix.get_mob_matrix().T)
        i, j, k = col1
        v1, v2, v3 = col2
        w1, w2, w3 = col3
        ##Really should fix Matrix mobject...
        # Manual vertical nudges to compensate for Matrix layout quirks.
        j.shift(0.1*UP)
        k.shift(0.2*UP)
        Group(v2, w2).shift(0.1*DOWN)
        Group(v3, w3).shift(0.2*DOWN)
        ##
        for color, entry in zip(colors, col1):
            entry.highlight(color)
        det_text = get_det_text(matrix)
        equals = TexMobject("=")
        equation = Group(
            v, TexMobject("\\times"), w,
            equals, Group(det_text, matrix)
        )
        equation.arrange_submobjects()
        self.add(*equation[:-2])
        self.dither()
        self.play(Write(matrix.get_brackets()))
        # Fly the v and w columns from their source vectors into the matrix.
        for col, vect in (col2, v), (col3, w):
            col.save_state()
            col.move_to(vect.get_entries())
            self.play(
                col.restore,
                path_arc = -np.pi/2,
            )
        for entry in col1:
            self.play(Write(entry))
        self.dither()
        self.play(*map(Write, [equals, det_text]))
        self.dither()
        disclaimer = TextMobject("$^*$ See ``Note on conventions'' in description")
        disclaimer.scale(0.7)
        disclaimer.highlight(RED)
        disclaimer.next_to(equation, DOWN)
        self.play(FadeIn(disclaimer))
        self.dither()
        self.play(FadeOut(disclaimer))
        # Circle the basis-vector column; Randolph reacts with confusion.
        circle = Circle()
        circle.stretch_to_fit_height(col1.get_height()+1)
        circle.stretch_to_fit_width(col1.get_width()+1)
        circle.move_to(col1)
        randy = Randolph()
        randy.scale(0.9)
        randy.to_corner()
        randy.to_edge(DOWN, buff = SMALL_BUFF)
        self.play(FadeIn(randy))
        self.play(
            randy.change_mode, "confused",
            ShowCreation(circle)
        )
        self.play(randy.look, RIGHT)
        self.dither()
        self.play(FadeOut(circle))
        self.play(
            equation.to_corner, UP+LEFT,
            ApplyFunction(
                lambda r : r.change_mode("plain").look(UP+RIGHT),
                randy
            )
        )
        # Cofactor expansion along column 1: each quint is
        # (basis, plus-term pair, minus-term pair).
        quints = [
            (i, v2, w3, v3, w2),
            (j, v3, w1, v1, w3),
            (k, v1, w2, v2, w1),
        ]
        last_mob = None
        paren_sets = []
        for quint in quints:
            for mob in quint:
                mob.t = mob.copy()
                mob.save_state()
            basis = quint[0]
            basis.t.scale(1/0.8)
            lp, minus, rp = syms = Group(*map(TexMobject, "(-)"))
            term = Group(
                basis.t, lp,
                quint[1].t, quint[2].t, minus,
                quint[3].t, quint[4].t, rp
            )
            term.arrange_submobjects()
            if last_mob:
                plus = TexMobject("+")
                syms.add(plus)
                plus.next_to(term, LEFT, buff = MED_BUFF/2)
                term.add_to_back(plus)
                term.next_to(last_mob, RIGHT, buff = MED_BUFF/2)
            else:
                term.next_to(equation, DOWN, buff = MED_BUFF, aligned_edge = LEFT)
            last_mob = term
            self.play(*it.chain(*[
                [mob.scale_in_place, 1.2]
                for mob in quint
            ]))
            self.dither()
            self.play(*[
                Transform(mob.copy(), mob.t)
                for mob in quint
            ] + [
                mob.restore for mob in quint
            ] + [
                Write(syms)
            ],
                run_time = 2
            )
            self.dither()
            paren_sets.append(Group(lp, rp))
        self.dither()
        self.play(randy.change_mode, "pondering")
        # Brace each parenthesized cofactor as "Some number".
        for parens in paren_sets:
            brace = Brace(parens)
            text = brace.get_text("Some number")
            text.scale_to_fit_width(brace.get_width())
            self.play(
                GrowFromCenter(brace),
                Write(text, run_time = 2)
            )
        self.dither()
class ThereIsAReason(TeacherStudentsScene):
    """Teacher concedes the determinant form is a notational trick, then
    insists there is a reason for it; students react."""
    def construct(self):
        self.teacher_says(
            "\\centering Sure, it's a \\\\", "notational", "trick",
        )
        self.random_blink(2)
        words = TextMobject(
            "\\centering but there is a\\\\",
            "reason", "for doing it"
        )
        words.highlight_by_tex("reason", YELLOW)
        self.teacher_says(words, pi_creature_target_mode = "surprised")
        self.change_student_modes(
            "raise_right_hand", "confused", "raise_left_hand"
        )
        self.random_blink()
class RememberDuality(TeacherStudentsScene):
    """Teacher asks the students whether they remember duality."""
    def construct(self):
        # "duality" is kept as its own submobject so only that word
        # receives the blue-to-yellow gradient.
        prompt = TextMobject("Remember ", "duality", "?", arg_separator = "")
        duality_word = prompt[1]
        duality_word.gradient_highlight(BLUE, YELLOW)
        self.teacher_says(prompt, pi_creature_target_mode = "sassy")
        self.random_blink(2)
class NextVideo(Scene):
    """End card: title of the next video above an empty 16:9 screen frame."""
    def construct(self):
        title = TextMobject("""
            Next video: Cross products in the
            light of linear transformations
        """)
        title.scale_to_fit_height(1.2)
        title.to_edge(UP, buff = MED_BUFF/2)
        # 16:9 placeholder rectangle for the upcoming video thumbnail.
        rect = Rectangle(width = 16, height = 9, color = BLUE)
        rect.scale_to_fit_height(6)
        rect.next_to(title, DOWN)
        self.add(title)
        self.play(ShowCreation(rect))
        self.dither()
class CrossAndDualWords(Scene):
    """Contrasts the cross product vector p = v x w with its dual
    transform L([x,y,z]) = det of the matrix with columns [x,y,z], v, w,
    which morphs into the dot product [x,y,z] . (v x w)."""
    def construct(self):
        v_tex, w_tex, p_tex = get_vect_tex(*"vwp")
        vector_word = TextMobject("Vector:")
        transform_word = TextMobject("Dual transform:")
        cross = TexMobject(
            p_tex, "=", v_tex, "\\times", w_tex
        )
        for tex, color in zip([v_tex, w_tex, p_tex], [U_COLOR, W_COLOR, P_COLOR]):
            cross.highlight_by_tex(tex, color)
        input_array_tex = matrix_to_tex_string(["x", "y", "z"])
        func = TexMobject("L\\left(%s\\right) = "%input_array_tex)
        # Columns: the symbolic input, then v, then w.
        matrix = Matrix(np.array([
            ["x", "y", "z"],
            ["v_1", "v_2", "v_3"],
            ["w_1", "w_2", "w_3"],
        ]).T)
        matrix.highlight_columns(WHITE, U_COLOR, W_COLOR)
        det_text = get_det_text(matrix, background_rect = False)
        det_text.add(matrix)
        dot_with_cross = TexMobject(
            "%s \\cdot ( "%input_array_tex,
            v_tex, "\\times", w_tex, ")"
        )
        dot_with_cross.highlight_by_tex(v_tex, U_COLOR)
        dot_with_cross.highlight_by_tex(w_tex, W_COLOR)
        transform = Group(func, det_text)
        transform.arrange_submobjects()
        Group(transform, dot_with_cross).scale(0.7)
        Group(vector_word, cross).arrange_submobjects(
            RIGHT, buff = MED_BUFF
        ).center().shift(LEFT).to_edge(UP)
        transform_word.next_to(vector_word, DOWN, buff = MED_BUFF, aligned_edge = LEFT)
        transform.next_to(transform_word, DOWN, buff = MED_BUFF, aligned_edge = LEFT)
        dot_with_cross.next_to(func, RIGHT)
        self.add(vector_word)
        self.play(Write(cross))
        self.dither()
        self.play(FadeIn(transform_word))
        self.play(Write(transform))
        self.dither()
        # The determinant expression becomes the equivalent dot product.
        self.play(Transform(det_text, dot_with_cross))
        self.dither()
| [
"grantsanderson7@gmail.com"
] | grantsanderson7@gmail.com |
5659fac1f5b514ddd3691759dde6533c4cbf6b48 | 4eaab9327d25f851f9e9b2cf4e9687d5e16833f7 | /problems/add_binary/solution.py | af644f463bbed2e7d591ff215c7088ac01b99e58 | [] | no_license | kadhirash/leetcode | 42e372d5e77d7b3281e287189dcc1cd7ba820bc0 | 72aea7d43471e529ee757ff912b0267ca0ce015d | refs/heads/master | 2023-01-21T19:05:15.123012 | 2020-11-28T13:53:11 | 2020-11-28T13:53:11 | 250,115,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | class Solution:
def addBinary(self, a: str, b: str) -> str:
    """Return the sum of two binary strings as a binary string.

    Per-bit truth table (sum bit, then carry):
        0 + 0 --> 0
        0 + 1 --> 1
        1 + 0 --> 1
        1 + 1 --> 0, carry 1

    Uses carry propagation on the parsed integers: XOR gives the
    carry-less sum, AND shifted left gives the carries; repeat until
    no carries remain.

    >>> Solution().addBinary("11", "1")
    '100'
    """
    x, y = int(a, 2), int(b, 2)  # y accumulates the pending carries
    while y != 0:
        answer = x ^ y          # per-bit sum, ignoring carries
        carry = (x & y) << 1    # carries, shifted into position
        x, y = answer, carry
    # bin() prefixes "0b"; strip it.  bin(0)[2:] == "0", so empty-sum
    # inputs like a="0", b="0" still yield "0".
    return bin(x)[2:]
"kadhirash@gmail.com"
] | kadhirash@gmail.com |
f0c4e0c75d7e48d3ec360646e4ee4bd809feb185 | d3fa8ded9d393ba9b03388ba7f05fc559cf31d1e | /Codes/monitor/checks/collector.py | b60006b4fa96e0546faabe7a8cb8eae0b2dd640f | [] | no_license | lengxu/YouYun | e20c4d8f553ccb245e96de177a67f776666e986f | b0ad8fd0b0e70dd2445cecb9ae7b00f7e0a20815 | refs/heads/master | 2020-09-13T22:30:49.642980 | 2017-11-27T03:13:34 | 2017-11-27T03:13:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,843 | py | import collections
import logging
import pprint
import socket
import sys
import time
import checks.system.unix as u
import checks.system.win32 as w32
import modules
from checks import AGENT_METRICS_CHECK_NAME, AgentCheck, create_service_check
from checks.check_status import (
CheckStatus,
CollectorStatus,
EmitterStatus,
STATUS_ERROR,
STATUS_OK,
)
from checks.datamonitor import DdForwarder, monitorstreams
from checks.ganglia import Ganglia
from config import get_system_stats, get_version
from gohai import get_gohai_data
from util import (
EC2,
GCE,
get_os,
Timer,
get_ip,
get_machine_type,
)
from util import get_uuid
from utils.jmx import JMXFiles
from utils.logger import log_exceptions
from utils.platform import Platform
log = logging.getLogger(__name__)
FLUSH_LOGGING_PERIOD = 10
FLUSH_LOGGING_INITIAL = 5
DD_CHECK_TAG = 'dd_check:{0}'
class AgentPayload(collections.MutableMapping):
METADATA_KEYS = frozenset(['meta', 'tags', 'host-tags', 'systemStats',
'agent_checks', 'external_host_tags'])
DUPLICATE_KEYS = frozenset(['apiKey', 'agentVersion'])
COMMON_ENDPOINT = ''
DATA_ENDPOINT = 'metrics'
METADATA_ENDPOINT = 'metadata'
def __init__(self):
self.data_payload = dict()
self.meta_payload = dict()
@property
def payload(self):
res = self.data_payload.copy()
res.update(self.meta_payload)
return res
def __getitem__(self, key):
if key in self.METADATA_KEYS:
return self.meta_payload[key]
else:
return self.data_payload[key]
def __setitem__(self, key, value):
if key in self.DUPLICATE_KEYS:
self.data_payload[key] = value
self.meta_payload[key] = value
elif key in self.METADATA_KEYS:
self.meta_payload[key] = value
else:
self.data_payload[key] = value
def __delitem__(self, key):
if key in self.DUPLICATE_KEYS:
del self.data_payload[key]
del self.meta_payload[key]
elif key in self.METADATA_KEYS:
del self.meta_payload[key]
else:
del self.data_payload[key]
def __iter__(self):
for item in self.data_payload:
yield item
for item in self.meta_payload:
yield item
def __len__(self):
return len(self.data_payload) + len(self.meta_payload)
def emit(self, log, config, emitters, continue_running, merge_payloads=True):
statuses = []
def _emit_payload(payload, endpoint):
statuses = []
for emitter in emitters:
if not continue_running:
return statuses
name = emitter.__name__
emitter_status = EmitterStatus(name)
try:
emitter(payload, log, config, endpoint)
except Exception, e:
log.exception("Error running emitter: %s"
% emitter.__name__)
emitter_status = EmitterStatus(name, e)
statuses.append(emitter_status)
return statuses
if merge_payloads:
statuses.extend(_emit_payload(self.payload, self.COMMON_ENDPOINT))
else:
statuses.extend(_emit_payload(self.data_payload, self.DATA_ENDPOINT))
statuses.extend(_emit_payload(self.meta_payload, self.METADATA_ENDPOINT))
return statuses
class Collector(object):
def __init__(self, agentConfig, emitters, systemStats, hostname):
    """Set up all check machinery.

    agentConfig: parsed agent configuration dict (mutated: system_stats
    is stored into it).  emitters: callables that ship payloads.
    systemStats: static system info gathered at startup.  hostname:
    resolved agent hostname.
    """
    self.ip = get_ip(agentConfig)
    self.emit_duration = None
    self.agentConfig = agentConfig
    self.hostname = hostname
    self.agentConfig['system_stats'] = systemStats
    self.os = get_os()
    self.plugins = None
    self.emitters = emitters
    self.check_timings = agentConfig.get('check_timings')
    # Schedules for periodically pushed extra data: each entry tracks the
    # last push ('start') and its period in seconds ('interval').
    self.push_times = {
        'host_metadata': {
            'start': time.time(),
            'interval': int(agentConfig.get('metadata_interval', 4 * 60 * 60))
        },
        'external_host_tags': {
            # Backdated so external host tags are sent early after start.
            'start': time.time() - 3 * 60,
            'interval': int(agentConfig.get('external_host_tags', 5 * 60))
        },
        'agent_checks': {
            'start': time.time(),
            'interval': int(agentConfig.get('agent_checks_interval', 10 * 60))
        },
    }
    # Global socket timeout so no check can hang the collector forever.
    socket.setdefaulttimeout(15)
    self.run_count = 0
    self.continue_running = True
    self.hostname_metadata_cache = None
    self.initialized_checks_d = []
    self.init_failed_checks_d = {}
    # Built-in system checks, selected per-platform in run().
    self._unix_system_checks = {
        'io': u.IO(log),
        'load': u.Load(log),
        'memory': u.Memory(log),
        'processes': u.Processes(log),
        'cpu': u.Cpu(log),
        'system': u.System(log)
    }
    self._win32_system_checks = {
        'io': w32.IO(log),
        'proc': w32.Processes(log),
        'memory': w32.Memory(log),
        'network': w32.Network(log),
        'cpu': w32.Cpu(log),
        'system': w32.System(log)
    }
    self._ganglia = Ganglia(log)
    self._monitorstream = monitorstreams.init(log, self.agentConfig)
    self._ddforwarder = DdForwarder(log, self.agentConfig)
    self._agent_metrics = None
    # Deprecated old-style custom checks, loaded from a comma-separated
    # list of module specs; failures are logged and skipped.
    self._metrics_checks = []
    for module_spec in [s.strip() for s in self.agentConfig.get('custom_checks', '').split(',')]:
        if len(module_spec) == 0:
            continue
        try:
            self._metrics_checks.append(modules.load(module_spec, 'Check')(log))
            log.info("Registered custom check %s" % module_spec)
            log.warning(
                "Old format custom checks are deprecated. They should be moved to the checks.d interface as old custom checks will be removed in a next version")
        except Exception:
            log.exception('Unable to load custom check module %s' % module_spec)
def stop(self):
    """Ask the collector to stop: flip the run flag and stop every
    initialized check."""
    # run() polls this flag between checks and exits early once cleared.
    self.continue_running = False
    for running_check in self.initialized_checks_d:
        running_check.stop()
@staticmethod
def _stats_for_display(raw_stats):
return pprint.pformat(raw_stats, indent=4)
@log_exceptions(log)
def run(self, checksd=None, start_event=True, configs_reloaded=False):
    """Execute one collection cycle: system checks, checks.d checks,
    payload assembly, emission, and status persistence.

    checksd: dict with 'initialized_checks' / 'init_failed_checks'.
    start_event: include an 'Agent Startup' event on the first run.
    configs_reloaded: re-pick the agent-metrics check after a reload.
    Returns the emitted AgentPayload (or None if stopped mid-run).
    """
    log.debug("Found {num_checks} checks".format(num_checks=len(checksd['initialized_checks'])))
    timer = Timer()
    if not Platform.is_windows():
        # CPU time baseline for the cpu_time internal metric below.
        cpu_clock = time.clock()
    self.run_count += 1
    log.debug("Starting collection run #%s" % self.run_count)
    if checksd:
        self.initialized_checks_d = checksd['initialized_checks']
        self.init_failed_checks_d = checksd['init_failed_checks']
    payload = AgentPayload()
    # The agent-metrics check is pulled out of the normal list and run
    # last so it can report on this very collection cycle.
    if not self._agent_metrics or configs_reloaded:
        for check in self.initialized_checks_d:
            if check.name == AGENT_METRICS_CHECK_NAME:
                self._agent_metrics = check
                self.initialized_checks_d.remove(check)
                break
    self._build_payload(payload)
    metrics = payload['metrics']
    events = payload['events']
    service_checks = payload['service_checks']
    # Platform system checks.
    if Platform.is_windows():
        try:
            metrics.extend(self._win32_system_checks['memory'].check(self.agentConfig))
            metrics.extend(self._win32_system_checks['cpu'].check(self.agentConfig))
            metrics.extend(self._win32_system_checks['network'].check(self.agentConfig))
            metrics.extend(self._win32_system_checks['io'].check(self.agentConfig))
            metrics.extend(self._win32_system_checks['proc'].check(self.agentConfig))
            metrics.extend(self._win32_system_checks['system'].check(self.agentConfig))
        except Exception:
            log.exception('Unable to fetch Windows system metrics.')
    else:
        sys_checks = self._unix_system_checks
        load = sys_checks['load'].check(self.agentConfig)
        payload.update(load)
        system = sys_checks['system'].check(self.agentConfig)
        payload.update(system)
        memory = sys_checks['memory'].check(self.agentConfig)
        if memory:
            # Flatten the memory check result into top-level payload keys.
            memstats = {
                'memPhysUsed': memory.get('physUsed'),
                'memPhysPctUsage': memory.get('physPctUsage'),
                'memPhysFree': memory.get('physFree'),
                'memPhysTotal': memory.get('physTotal'),
                'memPhysUsable': memory.get('physUsable'),
                'memSwapUsed': memory.get('swapUsed'),
                'memSwapFree': memory.get('swapFree'),
                'memSwapPctFree': memory.get('swapPctFree'),
                'memSwapTotal': memory.get('swapTotal'),
                'memCached': memory.get('physCached'),
                'memBuffers': memory.get('physBuffers'),
                'memShared': memory.get('physShared'),
                'memSlab': memory.get('physSlab'),
                'memPageTables': memory.get('physPageTables'),
                'memSwapCached': memory.get('swapCached')
            }
            payload.update(memstats)
        ioStats = sys_checks['io'].check(self.agentConfig)
        if ioStats:
            payload['ioStats'] = ioStats
        processes = sys_checks['processes'].check(self.agentConfig)
        payload.update({'processes': processes})
        cpuStats = sys_checks['cpu'].check(self.agentConfig)
        if cpuStats:
            payload.update(cpuStats)
    # Optional side channels: ganglia, monitorstream, dd-forwarder.
    gangliaData = self._ganglia.check(self.agentConfig)
    monitorstreamData = self._monitorstream.check(self.agentConfig)
    ddforwarderData = self._ddforwarder.check(self.agentConfig)
    if gangliaData is not False and gangliaData is not None:
        payload['ganglia'] = gangliaData
    if monitorstreamData:
        # Events are merged into the payload events; the rest of the
        # monitorstream data is flattened into the payload itself.
        monitorstreamEvents = monitorstreamData.get('monitorstreamEvents', None)
        if monitorstreamEvents:
            if 'monitorstream' in payload['events']:
                events['monitorstream'].extend(monitorstreamEvents)
            else:
                events['monitorstream'] = monitorstreamEvents
            del monitorstreamData['monitorstreamEvents']
        payload.update(monitorstreamData)
    if ddforwarderData:
        payload['datamonitor'] = ddforwarderData
    # Deprecated old-style custom checks.
    for metrics_check in self._metrics_checks:
        res = metrics_check.check(self.agentConfig)
        if res:
            metrics.extend(res)
    # checks.d checks: run each one, collecting metrics/events/service
    # checks and a per-check status.
    check_statuses = []
    for check in self.initialized_checks_d:
        if not self.continue_running:
            return
        log.info("Running check %s" % check.name)
        instance_statuses = []
        metric_count = 0
        event_count = 0
        service_check_count = 0
        check_start_time = time.time()
        check_stats = None
        try:
            instance_statuses = check.run()
            current_check_metrics = check.get_metrics()
            current_check_events = check.get_events()
            check_stats = check._get_internal_profiling_stats()
            current_check_metadata = check.get_service_metadata()
            metrics.extend(current_check_metrics)
            if current_check_events:
                if check.name not in events:
                    events[check.name] = current_check_events
                else:
                    events[check.name] += current_check_events
            metric_count = len(current_check_metrics)
            event_count = len(current_check_events)
        except Exception:
            # A failing check must not abort the whole collection run.
            log.exception("Error running check %s" % check.name)
        check_status = CheckStatus(
            check.name, instance_statuses, metric_count,
            event_count, service_check_count, service_metadata=current_check_metadata,
            library_versions=check.get_library_info(),
            source_type_name=check.SOURCE_TYPE_NAME or check.name,
            check_stats=check_stats
        )
        # Report the check's own health as a service check.
        service_check_tags = ["check:%s" % check.name]
        if check_status.status == STATUS_OK:
            status = AgentCheck.OK
        elif check_status.status == STATUS_ERROR:
            status = AgentCheck.CRITICAL
        check.service_check('datamonitor.agent.check_status', status, tags=service_check_tags)
        current_check_service_checks = check.get_service_checks()
        if current_check_service_checks:
            service_checks.extend(current_check_service_checks)
        service_check_count = len(current_check_service_checks)
        check_status.service_check_count = service_check_count
        check_statuses.append(check_status)
        check_run_time = time.time() - check_start_time
        log.debug("Check %s ran in %.2f s" % (check.name, check_run_time))
        if self.check_timings:
            metric = 'datamonitor.agent.check_run_time'
            meta = {'tags': ["check:%s" % check.name]}
            metrics.append((metric, time.time(), check_run_time, meta))
    # Record checks that failed to even initialize.
    for check_name, info in self.init_failed_checks_d.iteritems():
        if not self.continue_running:
            return
        check_status = CheckStatus(check_name, None, None, None, None,
                                   init_failed_error=info['error'],
                                   init_failed_traceback=info['error'])
        check_statuses.append(check_status)
    # Heartbeat service check.
    service_checks.append(create_service_check('datamonitor.agent.up', AgentCheck.OK,
                                               hostname=self.hostname))
    payload['metrics'] = metrics
    payload['events'] = events
    payload['service_checks'] = service_checks
    self._populate_payload_metadata(payload, check_statuses, start_event)
    collect_duration = timer.step()
    # Agent-metrics check runs last with the timings of this very cycle.
    if self._agent_metrics:
        metric_context = {
            'collection_time': collect_duration,
            'emit_time': self.emit_duration,
        }
        if not Platform.is_windows():
            metric_context['cpu_time'] = time.clock() - cpu_clock
        self._agent_metrics.set_metric_context(payload, metric_context)
        self._agent_metrics.run()
        agent_stats = self._agent_metrics.get_metrics()
        payload['metrics'].extend(agent_stats)
        if self.agentConfig.get('developer_mode'):
            log.debug("\n Agent developer mode stats: \n {0}".format(
                Collector._stats_for_display(agent_stats))
            )
        self._agent_metrics.get_service_metadata()
    # Ship the payload and persist run status for the status page.
    emitter_statuses = payload.emit(log, self.agentConfig, self.emitters,
                                    self.continue_running)
    self.emit_duration = timer.step()
    try:
        CollectorStatus(check_statuses, emitter_statuses,
                        self.hostname_metadata_cache).persist()
    except Exception:
        log.exception("Error persisting collector status")
    # Log at INFO for the first few runs, then only periodically.
    if self.run_count <= FLUSH_LOGGING_INITIAL or self.run_count % FLUSH_LOGGING_PERIOD == 0:
        log.info("Finished run #%s. Collection time: %ss. Emit time: %ss" %
                 (self.run_count, round(collect_duration, 2), round(self.emit_duration, 2)))
        if self.run_count == FLUSH_LOGGING_INITIAL:
            log.info("First flushes done, next flushes will be logged every %s flushes." %
                     FLUSH_LOGGING_PERIOD)
    else:
        log.debug("Finished run #%s. Collection time: %ss. Emit time: %ss" %
                  (self.run_count, round(collect_duration, 2), round(self.emit_duration, 2)))
    return payload
@staticmethod
def run_single_check(check, verbose=True):
    """Run one check in isolation (debug/CLI helper), printing its
    metrics, events, service checks and metadata to stdout, and return
    its CheckStatus.

    NOTE(review): `verbose` is accepted but never read here — presumably
    kept for interface compatibility; confirm before removing.
    """
    log.info("Running check %s" % check.name)
    instance_statuses = []
    metric_count = 0
    event_count = 0
    service_check_count = 0
    check_stats = None
    try:
        instance_statuses = check.run()
        current_check_metrics = check.get_metrics()
        current_check_events = check.get_events()
        current_service_checks = check.get_service_checks()
        current_service_metadata = check.get_service_metadata()
        check_stats = check._get_internal_profiling_stats()
        metric_count = len(current_check_metrics)
        event_count = len(current_check_events)
        service_check_count = len(current_service_checks)
        print "Metrics: \n{0}".format(pprint.pformat(current_check_metrics))
        print "Events: \n{0}".format(pprint.pformat(current_check_events))
        print "Service Checks: \n{0}".format(pprint.pformat(current_service_checks))
        print "Service Metadata: \n{0}".format(pprint.pformat(current_service_metadata))
    except Exception:
        log.exception("Error running check %s" % check.name)
    check_status = CheckStatus(
        check.name, instance_statuses, metric_count,
        event_count, service_check_count,
        library_versions=check.get_library_info(),
        source_type_name=check.SOURCE_TYPE_NAME or check.name,
        check_stats=check_stats
    )
    return check_status
def _emit(self, payload):
    """Send payload through every emitter, returning one EmitterStatus
    per emitter run.  Stops early (returning partial statuses) once
    continue_running is cleared; an emitter failure is logged and
    recorded, never raised."""
    statuses = []
    for emitter in self.emitters:
        if not self.continue_running:
            return statuses
        name = emitter.__name__
        emitter_status = EmitterStatus(name)
        try:
            emitter(payload, log, self.agentConfig)
        except Exception, e:
            log.exception("Error running emitter: %s" % emitter.__name__)
            emitter_status = EmitterStatus(name, e)
        statuses.append(emitter_status)
    return statuses
def _is_first_run(self):
return self.run_count <= 1
def _build_payload(self, payload):
    """Fill `payload` with the fixed per-run fields: host identity,
    versions, and empty containers for metrics/events/service checks."""
    now = time.time()
    payload['ip'] = self.ip
    payload['collection_timestamp'] = now
    payload['os'] = self.os
    payload['python'] = sys.version
    payload['agentVersion'] = self.agentConfig['version']
    payload['apiKey'] = self.agentConfig['api_key']
    # Containers that the run() cycle appends into.
    payload['events'] = {}
    payload['metrics'] = []
    payload['service_checks'] = []
    payload['resources'] = {}
    payload['internalHostname'] = self.hostname
    payload['uuid'] = get_uuid()
    payload['machine_type'] = get_machine_type()
    log.info('UUID: {0}'.format(payload['uuid']))
    log.info('Machine_Type: {0}'.format(payload['machine_type']))
    payload['host-tags'] = {}
    payload['external_host_tags'] = {}
def _populate_payload_metadata(self, payload, check_statuses, start_event=True):
now = time.time()
if start_event and self._is_first_run():
payload['systemStats'] = self.agentConfig.get('system_stats', {})
payload['events']['System'] = [{
'api_key': self.agentConfig['api_key'],
'host': payload['internalHostname'],
'timestamp': now,
'event_type': 'Agent Startup',
'msg_text': 'Version %s' % get_version()
}]
if self._should_send_additional_data('host_metadata'):
payload['gohai'] = get_gohai_data()
payload['systemStats'] = get_system_stats()
payload['meta'] = self._get_hostname_metadata()
log.info('GOHAI data: {0}'.format(payload['gohai']))
self.hostname_metadata_cache = payload['meta']
host_tags = []
if self.agentConfig['tags'] is not None:
host_tags.extend([tag.strip()
for tag in self.agentConfig['tags'].split(",")])
if self.agentConfig['collect_ec2_tags']:
host_tags.extend(EC2.get_tags(self.agentConfig))
if host_tags:
payload['host-tags']['system'] = host_tags
if self.agentConfig['create_dd_check_tags']:
app_tags_list = [DD_CHECK_TAG.format(c.name) for c in self.initialized_checks_d]
app_tags_list.extend([DD_CHECK_TAG.format(cname) for cname
in JMXFiles.get_jmx_appnames()])
if 'system' not in payload['host-tags']:
payload['host-tags']['system'] = []
payload['host-tags']['system'].extend(app_tags_list)
GCE_tags = GCE.get_tags(self.agentConfig)
if GCE_tags is not None:
payload['host-tags'][GCE.SOURCE_TYPE_NAME] = GCE_tags
if self._is_first_run():
log.info("Hostnames: %s, tags: %s" %
(repr(self.hostname_metadata_cache), payload['host-tags']))
external_host_tags = []
if self._should_send_additional_data('external_host_tags'):
for check in self.initialized_checks_d:
try:
getter = getattr(check, 'get_external_host_tags')
check_tags = getter()
external_host_tags.extend(check_tags)
except AttributeError:
pass
if external_host_tags:
payload['external_host_tags'] = external_host_tags
if self._should_send_additional_data('agent_checks'):
agent_checks = []
for check in check_statuses:
if check.instance_statuses is not None:
for i, instance_status in enumerate(check.instance_statuses):
agent_checks.append(
(
check.name, check.source_type_name,
instance_status.instance_id,
instance_status.status,
instance_status.error or instance_status.warnings or "",
check.service_metadata[i]
)
)
else:
agent_checks.append(
(
check.name, check.source_type_name,
"initialization",
check.status, repr(check.init_failed_error)
)
)
payload['agent_checks'] = agent_checks
payload['meta'] = self.hostname_metadata_cache
def _get_hostname_metadata(self):
metadata = EC2.get_metadata(self.agentConfig)
if metadata.get('hostname'):
metadata['ec2-hostname'] = metadata.get('hostname')
del metadata['hostname']
if self.agentConfig.get('hostname'):
metadata['agent-hostname'] = self.agentConfig.get('hostname')
else:
try:
metadata["socket-hostname"] = socket.gethostname()
except Exception:
pass
try:
metadata["socket-fqdn"] = socket.getfqdn()
except Exception:
pass
metadata["hostname"] = self.hostname
metadata["timezones"] = sanitize_tzname(time.tzname)
host_aliases = GCE.get_host_aliases(self.agentConfig)
if host_aliases:
metadata['host_aliases'] = host_aliases
return metadata
def _should_send_additional_data(self, data_name):
if self._is_first_run():
return True
now = time.time()
if now - self.push_times[data_name]['start'] >= self.push_times[data_name]['interval']:
log.debug('%s interval has passed. Sending it.' % data_name)
self.push_times[data_name]['start'] = now
return True
return False
def sanitize_tzname(tzname):
if tzname[0] == '\x93\x8c\x8b\x9e (\x95W\x8f\x80\x8e\x9e)':
log.debug('tzname from TOKYO detected and converted')
return ('JST', 'JST')
elif tzname[0] == '\xd6\xd0\xb9\xfa\xb1\xea\xd7\xbc\xca\xb1\xbc\xe4':
log.debug('tzname from CHINA detected and converted')
return ('UTC', 'UTC')
else:
return tzname
| [
"smartbrandnew@163.com"
] | smartbrandnew@163.com |
3f9551a286e4524b521bcb829455d5be05fab4f4 | de81be0bc3e2770ed68db2e4c116140cf2a42da3 | /10.Loop/2.py | 1014cf65e468817146f44c6cce592c3bcda8063a | [] | no_license | alstn2468/python-study-opentutorials | f2d993a3094fa2d6603fb748a3b77c89b24ab013 | 28eabcb00c449c5409dbfbf559f2c0d4a5142c8a | refs/heads/master | 2021-10-08T13:08:50.541828 | 2018-03-24T14:08:40 | 2018-03-24T14:08:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58 | py | while True:
print("Hello world")
print("After while")
| [
"alstn2468_@naver.com"
] | alstn2468_@naver.com |
8a91de2422a05acc4045bbd892ae5854e9bf2d7d | 4e1d7747a1c916a20546a307603a2191119b7895 | /网络/osi七层协议.py | ead6b1583455774578ed093917ad631329fd6b4d | [] | no_license | zhio/python_notebook | 3cd85850cd4cc67f81cc97726e78ccd3b96ce152 | 9a3640062002c3d0775be2f7d0342bcbed7de757 | refs/heads/main | 2023-06-22T01:25:28.451707 | 2021-07-22T08:30:49 | 2021-07-22T08:30:49 | 388,369,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | """
1.应用层:HTTP
2.表现层
3.会话层
4.传输层:TCP,UDP
5.网络层:IP,ARP
6.数据链路层
7.物理层
""" | [
"chenjb04@163.com"
] | chenjb04@163.com |
f0811e61669d74bf1815b34a8639d8024adcf499 | b7fccda9944b25c5c9b5a91253eac24e1c4c9b23 | /tests/settings.py | 4003a9bf4dae22e89994804f2dcb8b53953c8fb0 | [
"BSD-3-Clause"
] | permissive | ouhouhsami/django-geoads | 96f47fd17496a13d611b6ed2462f32dfa81c4401 | bec8f9ce8b8744775aee26b14a884598a599d9af | refs/heads/master | 2021-01-01T16:51:23.715917 | 2013-11-16T23:13:52 | 2013-11-16T23:13:52 | 4,099,272 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,638 | py | # Django settings for localized_classified_ads project.
import os
import sys
DEBUG = True
TEMPLATE_DEBUG = DEBUG
GEOADS_ASYNC = False
#EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = 'tmp/email-messages/'
SITE_ROOT = os.path.dirname(os.path.realpath(__file__))
ADMINS = ('admin@geoads.com',)
MANAGERS = ADMINS
DJANGO_MODERATION_MODERATORS = (
'test@example.com',
)
TEST_RUNNER = 'django_coverage.coverage_runner.CoverageRunner'
# I exclude admin.py files from my coverage
# these files does'nt set anything spectial
COVERAGE_MODULE_EXCLUDES = ['tests$','factories', 'settings$', 'urls$', 'locale$', '__init__', 'django',
'migrations', 'admin']
COVERAGE_REPORT_HTML_OUTPUT_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'coverage_report')
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'geoads_db',
'USER': 'postgres',
}
}
GEOCODE = 'nominatim'
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Paris'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'fr-FR'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'abc'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'tests.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.gis',
'django.contrib.flatpages',
'django.contrib.sitemaps',
'django_filters',
'django_rq',
'moderation',
'customads',
'geoads',
'geoads.contrib.moderation',
)
# specific test setting for coverage information
#SOUTH_TESTS_MIGRATE = False
#SKIP_SOUTH_TESTS = True
# for testing purposes, profile page = home/search page
ADS_PROFILE_URL = '/'
# for testing purposes, profile signup page = home/search page
ADS_PROFILE_SIGNUP = '/'
# QUEUE
RQ_QUEUES = {
'default': {
'HOST': 'localhost',
'PORT': 6379,
'DB': 0,
},
}
if DEBUG:
for queueConfig in RQ_QUEUES.itervalues():
queueConfig['ASYNC'] = False
| [
"samuel.goldszmidt@gmail.com"
] | samuel.goldszmidt@gmail.com |
2b01732f00eccf85447635c842aeea87107b29a2 | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /pkgs/dask-0.8.1-py27_0/lib/python2.7/site-packages/dask/diagnostics/progress.py | 42babc30a3fe0b3d4de1caae9af138a8e30b9c1a | [
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 4,169 | py | from __future__ import absolute_import, division, print_function
import sys
import threading
import time
from timeit import default_timer
from ..callbacks import Callback
from ..utils import ignoring
def format_time(t):
"""Format seconds into a human readable form.
>>> format_time(10.4)
'10.4s'
>>> format_time(1000.4)
'16min 40.4s'
"""
m, s = divmod(t, 60)
h, m = divmod(m, 60)
if h:
return '{0:2.0f}hr {1:2.0f}min {2:4.1f}s'.format(h, m, s)
elif m:
return '{0:2.0f}min {1:4.1f}s'.format(m, s)
else:
return '{0:4.1f}s'.format(s)
class ProgressBar(Callback):
"""A progress bar for dask.
Parameters
----------
minimum : int, optional
Minimum time threshold in seconds before displaying a progress bar.
Default is 0 (always display)
width : int, optional
Width of the bar
dt : float, optional
Update resolution in seconds, default is 0.1 seconds
Examples
--------
Below we create a progress bar with a minimum threshold of 1 second before
displaying. For cheap computations nothing is shown:
>>> with ProgressBar(minimum=1.0): # doctest: +SKIP
... out = some_fast_computation.compute()
But for expensive computations a full progress bar is displayed:
>>> with ProgressBar(minimum=1.0): # doctest: +SKIP
... out = some_slow_computation.compute()
[########################################] | 100% Completed | 10.4 s
The duration of the last computation is available as an attribute
>>> pbar = ProgressBar()
>>> with pbar: # doctest: +SKIP
... out = some_computation.compute()
[########################################] | 100% Completed | 10.4 s
>>> pbar.last_duration # doctest: +SKIP
10.4
You can also register a progress bar so that it displays for all
computations:
>>> pbar = ProgressBar() # doctest: +SKIP
>>> pbar.register() # doctest: +SKIP
>>> some_slow_computation.compute() # doctest: +SKIP
[########################################] | 100% Completed | 10.4 s
"""
def __init__(self, minimum=0, width=40, dt=0.1):
self._minimum = minimum
self._width = width
self._dt = dt
self.last_duration = 0
def _start(self, dsk):
self._state = None
self._start_time = default_timer()
# Start background thread
self._running = True
self._timer = threading.Thread(target=self._timer_func)
self._timer.start()
def _pretask(self, key, dsk, state):
self._state = state
sys.stdout.flush()
def _finish(self, dsk, state, errored):
self._running = False
self._timer.join()
elapsed = default_timer() - self._start_time
self.last_duration = elapsed
if elapsed < self._minimum:
return
if not errored:
self._draw_bar(1, elapsed)
else:
self._update_bar(elapsed)
sys.stdout.write('\n')
sys.stdout.flush()
def _timer_func(self):
"""Background thread for updating the progress bar"""
while self._running:
elapsed = default_timer() - self._start_time
if elapsed > self._minimum:
self._update_bar(elapsed)
time.sleep(self._dt)
def _update_bar(self, elapsed):
s = self._state
if not s:
self._draw_bar(0, elapsed)
return
ndone = len(s['finished'])
ntasks = sum(len(s[k]) for k in ['ready', 'waiting', 'running']) + ndone
self._draw_bar(ndone / ntasks if ntasks else 0, elapsed)
def _draw_bar(self, frac, elapsed):
bar = '#' * int(self._width * frac)
percent = int(100 * frac)
elapsed = format_time(elapsed)
msg = '\r[{0:<{1}}] | {2}% Completed | {3}'.format(bar, self._width,
percent, elapsed)
with ignoring(ValueError):
sys.stdout.write(msg)
sys.stdout.flush()
| [
"wgyumg@mgail.com"
] | wgyumg@mgail.com |
69f8539cd9c2a5ff7d1f58e302cc31eda63563b1 | 73b8aba05ee1424f38a8598a9f1305185588075f | /0x0B-python-input_output/9-student.py | 47edce80a94d4ef2c2595c7c27eabbf247e1dbce | [] | no_license | nicolasportela/holbertonschool-higher_level_programming | 0d176c0e56f4f703c1e9a98b430fc6120f22f675 | e1537b81f21118456e5cfa0e4ed89520b232adb6 | refs/heads/master | 2023-04-20T21:30:22.693434 | 2021-05-13T01:47:30 | 2021-05-13T01:47:30 | 319,397,633 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | #!/usr/bin/python3
"""this module writes a class named Student"""
class Student:
"""class creation"""
def __init__(self, first_name, last_name, age):
"""instantiation"""
self.first_name = first_name
self.last_name = last_name
self.age = age
def to_json(self):
"""retrieves a dictionary representation"""
return self.__dict__
| [
"2103@holbertonschool.com"
] | 2103@holbertonschool.com |
6d9800cf9a9495c4a42c8fe5a5a58a07ce058822 | 13b14c9c75143bf2eda87cb4a41006a52dd6f02b | /AOJ/ITP1_3_B/ITP1_3_B.py | 93ea6cff7ce0266ca8344cacfb468e3abaddbee7 | [] | no_license | yutaka-watanobe/problem-solving | 2c311ac856c79c20aef631938140118eb3bc3835 | f0b92125494fbd3c8d203989ec9fef53f52ad4b4 | refs/heads/master | 2021-06-03T12:58:39.881107 | 2020-12-16T14:34:16 | 2020-12-16T14:34:16 | 94,963,754 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | i = 1
while True:
x = int(input())
if x == 0: break
print(f"Case {i}: {x}")
i += 1
| [
"y.watanobe@gmail.com"
] | y.watanobe@gmail.com |
7245421fc62c4bc78c583bb78c878771fd612113 | 2626f6e6803c8c4341d01f57228a0fe117e3680b | /students/alexLaws/lesson06/Calculator/unit-test.py | 4aabc968ccbaa596943a18f7b0d8b90f8d88ec8d | [] | no_license | kmsnyde/SP_Online_Course2_2018 | 9e59362da253cdec558e1c2f39221c174d6216f3 | 7fe8635b47d4792a8575e589797260ad0a2b027e | refs/heads/master | 2020-03-19T17:15:03.945523 | 2018-09-05T22:28:55 | 2018-09-05T22:28:55 | 136,750,231 | 0 | 0 | null | 2018-06-09T19:01:52 | 2018-06-09T19:01:51 | null | UTF-8 | Python | false | false | 2,831 | py | from unittest import TestCase
from unittest.mock import MagicMock
from calculator.adder import Adder
from calculator.subtracter import Subtracter
from calculator.multiplier import Multiplier
from calculator.divider import Divider
from calculator.calculator import Calculator
from calculator.exceptions import InsufficientOperands
class AdderTests(TestCase):
def test_adding(self):
adder = Adder()
for i in range(-10, 10):
for j in range(-10, 10):
self.assertEqual(i + j, adder.calc(i, j))
class SubtracterTests(TestCase):
def test_subtracting(self):
subtracter = Subtracter()
for i in range(-10, 10):
for j in range(-10, 10):
self.assertEqual(i - j, subtracter.calc(i, j))
class MultiplierTests(TestCase):
def test_multiplying(self):
multiplier = Multiplier()
for i in range(-10, 10):
for j in range(-10, 10):
self.assertEqual(i * j, multiplier.calc(i, j))
class DividerTests(TestCase):
def test_dividing(self):
divider = Divider()
for i in range(1, 10):
for j in range(1, 10):
self.assertEqual(i / j, divider.calc(i, j))
class CalculatorTests(TestCase):
def setUp(self):
self.adder = Adder()
self.subtracter = Subtracter()
self.multiplier = Multiplier()
self.divider = Divider()
self.calculator = Calculator(self.adder, self.subtracter, self.multiplier, self.divider)
def test_insufficient_operands(self):
self.calculator.enter_number(0)
with self.assertRaises(InsufficientOperands):
self.calculator.add()
def test_adder_call(self):
self.adder.calc = MagicMock(return_value=0)
self.calculator.enter_number(1)
self.calculator.enter_number(2)
self.calculator.add()
self.adder.calc.assert_called_with(1, 2)
def test_subtracter_call(self):
self.subtracter.calc = MagicMock(return_value=0)
self.calculator.enter_number(1)
self.calculator.enter_number(2)
self.calculator.subtract()
self.subtracter.calc.assert_called_with(1, 2)
def test_multiplier_call(self):
self.multiplier.calc = MagicMock(return_value=0)
self.calculator.enter_number(1)
self.calculator.enter_number(2)
self.calculator.multiply()
self.multiplier.calc.assert_called_with(1, 2)
def test_divider_call(self):
self.divider.calc = MagicMock(return_value=0)
self.calculator.enter_number(1)
self.calculator.enter_number(2)
self.calculator.divide()
self.divider.calc.assert_called_with(1, 2)
| [
"kmsnyder2@verizon.net"
] | kmsnyder2@verizon.net |
e3402ab9f154e153963567c7195d6764d66c8b2b | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_6377668744314880_0/Python/hero777/15_3_1s.py | 6a14e7f31677be4fe6912de46c33060887d121b9 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | f = open('a.txt','r')
f0 = open('output.txt','w')
a0 = [int(x) for x in f.readline().split()]
for index in range (0,a0[0]):
x, = [int(x) for x in f.readline().split()]
f0.write('Case #')
f0.write(str(index+1))
f0.write(': ')
f0.write('\n')
x1 = [1]*x
y1 = [1]*x
for index2 in range (0,x):
x1[index2], y1[index2], = [int(q) for q in f.readline().split()]
for index2 in range (0,x):
counter = x-2
for index3 in range (0, x):
counter1 = 0
counter2 = 0
if index3 == index2:
continue
for index4 in range (0, x):
if (index4 == index3 or index4 == index2):
continue
z = long(y1[index3]-y1[index2])*long(x1[index4] - x1[index3]) - long(x1[index3] - x1[index2])*long(y1[index4] - y1[index3])
print(z)
if z < 0:
counter1 = counter1 +1
if z>0:
counter2 = counter2 +1
we = min(counter1,counter2)
counter = min(counter, we)
ans = max(counter,0)
f0.write(str(ans))
f0.write('\n')
| [
"eewestman@gmail.com"
] | eewestman@gmail.com |
78136ab0a5146745ae8d5ba53635c748b109beda | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p00002/s705893489.py | 07943c2872474d02f692c0b66ec4f7740cc08380 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | while 1:
try:
a,b = map(int, raw_input().split())
n = a+b
print len(str(n))
except EOFError:
break | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
cefd0697efe5cc2ff06a3562f310f1133c9a5da1 | a9e81c87022fdde86d47a4ec1e74791da8aa0e30 | /python-learning/libraries/pyqt5/base/animation/qeasingcurve-demo.py | 13c911e44108f0f823b9d5eab00fd31546f00ef0 | [
"Apache-2.0"
] | permissive | ymli1997/deeplearning-notes | c5c6926431b7efc1c6823d85e3eb470f3c986494 | f2317d80cd998305814f988e5000241797205b63 | refs/heads/master | 2020-07-29T11:15:43.689307 | 2018-05-05T10:58:18 | 2018-05-05T10:58:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,212 | py | # -*- coding: utf-8 -*-
'''
属性控件
'''
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
import sys
class Form(QMainWindow):
def __init__(self,parent=None):
super().__init__(parent)
centerWidget = QWidget()
layout = QHBoxLayout()
# 添加控件代码
button1 = QPushButton("Animation")
button1.setGeometry(10,10,100,30)
animation = QPropertyAnimation(button1,b"geometry")
animation.setDuration(3000)
animation.setStartValue(QRect(10,10,100,30))
animation.setEndValue(QRect(200,150,100,30))
easingCurve = QEasingCurve(QEasingCurve.InBack)
animation.setEasingCurve(easingCurve)
button2 = QPushButton("Start")
button2.setGeometry(120,10,100,30)
button2.clicked.connect(lambda :animation.start())
layout.addWidget(button1)
layout.addWidget(button2)
centerWidget.setLayout(layout)
self.setCentralWidget(centerWidget)
self.resize(640,480)
self.setWindowTitle("PyQt5-")
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = Form()
ex.show()
sys.exit(app.exec_()) | [
"kkoolerter@gmail.com"
] | kkoolerter@gmail.com |
508ba6f49b063a1b8f7951ed21108506687749d2 | b8e9dd6fd8f8b691cba5a3af2388467bcf6c90bb | /samples/openapi3/client/petstore/python-experimental/petstore_api/model/class_model.pyi | c26397416a52ea3ee2cb8504d252698b9b42857c | [
"Apache-2.0"
] | permissive | FallenRiteMonk/openapi-generator | f8b98940219eecf14dc76dced4b0fbd394522aa3 | b6576d11733ecad6fa4a0a616e1a06d502a771b7 | refs/heads/master | 2023-03-16T05:23:36.501909 | 2022-09-02T01:46:56 | 2022-09-02T01:46:56 | 164,609,299 | 0 | 0 | Apache-2.0 | 2019-01-08T09:08:56 | 2019-01-08T09:08:56 | null | UTF-8 | Python | false | false | 2,550 | pyi | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from petstore_api import schemas # noqa: F401
class ClassModel(
schemas.AnyTypeSchema,
):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Model for testing model with "_class" property
"""
class MetaOapg:
class properties:
_class = schemas.StrSchema
__annotations__ = {
"_class": _class,
}
additional_properties = schemas.AnyTypeSchema
_class: typing.Union[MetaOapg.properties._class, schemas.Unset]
@typing.overload
def __getitem__(self, name: typing.Literal["_class"]) -> typing.Union[MetaOapg.properties._class, schemas.Unset]: ...
@typing.overload
def __getitem__(self, name: str) -> typing.Union[MetaOapg.additional_properties, schemas.Unset]: ...
def __getitem__(self, name: typing.Union[str, typing.Literal["_class"], ]):
# dict_instance[name] accessor
if not hasattr(self.MetaOapg, 'properties') or name not in self.MetaOapg.properties.__annotations__:
return super().__getitem__(name)
try:
return super().__getitem__(name)
except KeyError:
return schemas.unset
def __new__(
cls,
*args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes, ],
_class: typing.Union[MetaOapg.properties._class, str, schemas.Unset] = schemas.unset,
_configuration: typing.Optional[schemas.Configuration] = None,
**kwargs: typing.Union[MetaOapg.additional_properties, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes, ],
) -> 'ClassModel':
return super().__new__(
cls,
*args,
_class=_class,
_configuration=_configuration,
**kwargs,
)
| [
"noreply@github.com"
] | FallenRiteMonk.noreply@github.com |
b3979984927341a8b493352e08d0e64e426032ae | 6ce856e7693c003a85f0e23a239907a87ecb7c89 | /cms/coltrane/models.py | 1b0f837e8209bb9eb3d22d369e6e0b5301f4886f | [] | no_license | wherby/DJango | 4776f0b92b69124be2d213f640fc12e5409f0ee2 | 2545fe7b908e5ef4f6aef2ecca78da77f4d7ccd0 | refs/heads/master | 2018-12-28T13:45:58.851730 | 2014-06-28T18:40:01 | 2014-06-28T18:40:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,484 | py | import datetime
from django.db import models
from django.contrib.auth.models import User
from tagging.fields import TagField
from markdown import markdown
class Category(models.Model):
title = models.CharField(max_length=250,help_text='Maximum 250 characters.')
slug = models.SlugField(unique=True,help_text="Suggested value automatically generated from title. Must be unique")
description = models.TextField()
class Meta:
ordering = ['title']
verbose_name_plural ="Categories"
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/categories/%s/" % self.slug
class Entry(models.Model):
LIVE_STATUS = 1
DRAFT_STATUS = 2
HIDDEN_STATUS = 3
STATUS_CHOICES = (
(LIVE_STATUS, 'Live'),
(DRAFT_STATUS, 'Draft'),
(HIDDEN_STATUS, 'Hidden'),
)
title = models.CharField(max_length=250,help_text="Maximum 250 characters.")
excerpt = models.TextField(blank=True,help_text="A short summary of the entry. Optional.")
body = models.TextField()
pub_date = models.DateTimeField(default=datetime.datetime.now)
#MetaData
author = models.ForeignKey(User)
enable_comments = models.BooleanField(default=True)
featured = models.BooleanField(default=False)
slug = models.SlugField(unique_for_date='pub_date',
help_text="Suggested value automatically generated from title. Must be unique.")
status = models.IntegerField(choices=STATUS_CHOICES, default=LIVE_STATUS,
help_text="Only entries with live status will be publicly displayed.")
# Categorization.
categories = models.ManyToManyField(Category)
tags = TagField(help_text="Separate tags with spaces.")
#fields to store gereated HTML
excerpt_html = models.TextField(editable=False, blank=True)
body_html = models.TextField(editable=False, blank=True)
class Meta:
verbose_name_plural = "Entries"
ordering = ['-pub_date']
def __unicode__(self):
return self.title
def save(self, force_insert=False, force_update=False):
self.body_html = markdown(self.body)
if self.excerpt:
self.excerpt_html = markdown(self.excerpt)
super(Entry, self).save(force_insert, force_update)
def get_absolute_url(self):
return "/weblog/%s/%s/" %(self.pub_date.strftime("%Y/%b/%d").lower(), self.slug)
| [
"187225577@qq.com"
] | 187225577@qq.com |
1e29a61d5f946cf138297066af08bcfa6884588c | 5e51625b5885f23a10213b373d46900eefd3312c | /torch_sparse/__init__.py | a4b3a38565aca774eb9ed9c0d1e43047d7d096d2 | [
"MIT"
] | permissive | ducptruong/pytorch_sparse | a84c7c8892dfce8a1c7e0048d1a83bea314bd1c1 | 46dac04fd29a9585ed67afe28eaa4c667eabf9e3 | refs/heads/master | 2023-02-19T06:17:50.252560 | 2021-01-19T11:21:52 | 2021-01-19T11:21:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,173 | py | import importlib
import os.path as osp
import torch
__version__ = '0.6.8'
for library in [
'_version', '_convert', '_diag', '_spmm', '_spspmm', '_metis', '_rw',
'_saint', '_sample', '_relabel'
]:
torch.ops.load_library(importlib.machinery.PathFinder().find_spec(
library, [osp.dirname(__file__)]).origin)
if torch.cuda.is_available() and torch.version.cuda: # pragma: no cover
cuda_version = torch.ops.torch_sparse.cuda_version()
if cuda_version == -1:
major = minor = 0
elif cuda_version < 10000:
major, minor = int(str(cuda_version)[0]), int(str(cuda_version)[2])
else:
major, minor = int(str(cuda_version)[0:2]), int(str(cuda_version)[3])
t_major, t_minor = [int(x) for x in torch.version.cuda.split('.')]
if t_major != major:
raise RuntimeError(
f'Detected that PyTorch and torch_sparse were compiled with '
f'different CUDA versions. PyTorch has CUDA version '
f'{t_major}.{t_minor} and torch_sparse has CUDA version '
f'{major}.{minor}. Please reinstall the torch_sparse that '
f'matches your PyTorch install.')
from .storage import SparseStorage # noqa
from .tensor import SparseTensor # noqa
from .transpose import t # noqa
from .narrow import narrow, __narrow_diag__ # noqa
from .select import select # noqa
from .index_select import index_select, index_select_nnz # noqa
from .masked_select import masked_select, masked_select_nnz # noqa
from .permute import permute # noqa
from .diag import remove_diag, set_diag, fill_diag, get_diag # noqa
from .add import add, add_, add_nnz, add_nnz_ # noqa
from .mul import mul, mul_, mul_nnz, mul_nnz_ # noqa
from .reduce import sum, mean, min, max # noqa
from .matmul import matmul # noqa
from .cat import cat # noqa
from .rw import random_walk # noqa
from .metis import partition # noqa
from .bandwidth import reverse_cuthill_mckee # noqa
from .saint import saint_subgraph # noqa
from .padding import padded_index, padded_index_select # noqa
from .sample import sample, sample_adj # noqa
from .convert import to_torch_sparse, from_torch_sparse # noqa
from .convert import to_scipy, from_scipy # noqa
from .coalesce import coalesce # noqa
from .transpose import transpose # noqa
from .eye import eye # noqa
from .spmm import spmm # noqa
from .spspmm import spspmm # noqa
__all__ = [
'SparseStorage',
'SparseTensor',
't',
'narrow',
'__narrow_diag__',
'select',
'index_select',
'index_select_nnz',
'masked_select',
'masked_select_nnz',
'permute',
'remove_diag',
'set_diag',
'fill_diag',
'get_diag',
'add',
'add_',
'add_nnz',
'add_nnz_',
'mul',
'mul_',
'mul_nnz',
'mul_nnz_',
'sum',
'mean',
'min',
'max',
'matmul',
'cat',
'random_walk',
'partition',
'reverse_cuthill_mckee',
'saint_subgraph',
'padded_index',
'padded_index_select',
'to_torch_sparse',
'from_torch_sparse',
'to_scipy',
'from_scipy',
'coalesce',
'transpose',
'eye',
'spmm',
'spspmm',
'__version__',
]
| [
"matthias.fey@tu-dortmund.de"
] | matthias.fey@tu-dortmund.de |
42e36441f9c8ab50c600301228606c0c46780f54 | 60e7738d90ea7151a790a73285382b0c77799262 | /p3/Lib/site-packages/tensorflow/contrib/tpu/profiler/tpu_profiler_pb2.py | bd58b52866b4ab67c06b167cf58b54e6297c13c3 | [
"MIT"
] | permissive | fpark7/Native2Native | 251b3c08af16bbbc4d077840f66aea7acdacc002 | 1bc3390770ddafbba2e2779ba91998643df6d9ec | refs/heads/master | 2021-04-18T21:27:41.378371 | 2018-03-27T02:47:51 | 2018-03-27T02:47:51 | 126,620,375 | 1 | 2 | MIT | 2021-03-19T22:50:00 | 2018-03-24T16:52:28 | Python | UTF-8 | Python | false | true | 10,839 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorflow/contrib/tpu/profiler/tpu_profiler.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorflow.core.framework import graph_pb2 as tensorflow_dot_core_dot_framework_dot_graph__pb2
from tensorflow.core.protobuf import config_pb2 as tensorflow_dot_core_dot_protobuf_dot_config__pb2
from tensorflow.contrib.tpu.profiler import op_profile_pb2 as tensorflow_dot_contrib_dot_tpu_dot_profiler_dot_op__profile__pb2
# File-level descriptor for tpu_profiler.proto.  The serialized_pb blob is the
# wire-format FileDescriptorProto emitted by protoc — generated data, never
# edited by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='tensorflow/contrib/tpu/profiler/tpu_profiler.proto',
  package='tensorflow',
  syntax='proto3',
  serialized_pb=_b('\n2tensorflow/contrib/tpu/profiler/tpu_profiler.proto\x12\ntensorflow\x1a%tensorflow/core/framework/graph.proto\x1a%tensorflow/core/protobuf/config.proto\x1a\x30tensorflow/contrib/tpu/profiler/op_profile.proto\"-\n\x0eProfileOptions\x12\x1b\n\x13include_dataset_ops\x18\x01 \x01(\x08\"r\n\x0eProfileRequest\x12\x13\n\x0b\x64uration_ms\x18\x01 \x01(\x04\x12\x12\n\nmax_events\x18\x02 \x01(\x04\x12\r\n\x05tools\x18\x03 \x03(\t\x12(\n\x04opts\x18\x04 \x01(\x0b\x32\x1a.tensorflow.ProfileOptions\"-\n\x0fProfileToolData\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\xf6\x01\n\x0fProfileResponse\x12/\n\x11\x63omputation_graph\x18\x02 \x03(\x0b\x32\x14.tensorflow.GraphDef\x12-\n\x0chlo_metadata\x18\x05 \x01(\x0b\x32\x17.tensorflow.RunMetadata\x12\x15\n\rencoded_trace\x18\x03 \x01(\x0c\x12\x36\n\nop_profile\x18\x04 \x01(\x0b\x32\".tensorflow.tpu.op_profile.Profile\x12.\n\ttool_data\x18\x06 \x03(\x0b\x32\x1b.tensorflow.ProfileToolDataJ\x04\x08\x01\x10\x02\x32S\n\x0bTPUProfiler\x12\x44\n\x07Profile\x12\x1a.tensorflow.ProfileRequest\x1a\x1b.tensorflow.ProfileResponse\"\x00\x62\x06proto3')
  ,
  dependencies=[tensorflow_dot_core_dot_framework_dot_graph__pb2.DESCRIPTOR,tensorflow_dot_core_dot_protobuf_dot_config__pb2.DESCRIPTOR,tensorflow_dot_contrib_dot_tpu_dot_profiler_dot_op__profile__pb2.DESCRIPTOR,])
# Descriptor for the ProfileOptions message (one bool field:
# include_dataset_ops).  protoc-generated; do not edit manually.
_PROFILEOPTIONS = _descriptor.Descriptor(
  name='ProfileOptions',
  full_name='tensorflow.ProfileOptions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='include_dataset_ops', full_name='tensorflow.ProfileOptions.include_dataset_ops', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=194,
  serialized_end=239,
)

# Descriptor for the ProfileRequest message: duration_ms, max_events, the
# requested tool names, and nested ProfileOptions.
_PROFILEREQUEST = _descriptor.Descriptor(
  name='ProfileRequest',
  full_name='tensorflow.ProfileRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='duration_ms', full_name='tensorflow.ProfileRequest.duration_ms', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_events', full_name='tensorflow.ProfileRequest.max_events', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tools', full_name='tensorflow.ProfileRequest.tools', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='opts', full_name='tensorflow.ProfileRequest.opts', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=241,
  serialized_end=355,
)

# Descriptor for the ProfileToolData message: a tool name plus its opaque
# bytes payload.
_PROFILETOOLDATA = _descriptor.Descriptor(
  name='ProfileToolData',
  full_name='tensorflow.ProfileToolData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='tensorflow.ProfileToolData.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='data', full_name='tensorflow.ProfileToolData.data', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=357,
  serialized_end=402,
)

# Descriptor for the ProfileResponse message: computation graphs, HLO run
# metadata, the encoded trace, the op profile, and per-tool data blobs.
_PROFILERESPONSE = _descriptor.Descriptor(
  name='ProfileResponse',
  full_name='tensorflow.ProfileResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='computation_graph', full_name='tensorflow.ProfileResponse.computation_graph', index=0,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hlo_metadata', full_name='tensorflow.ProfileResponse.hlo_metadata', index=1,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='encoded_trace', full_name='tensorflow.ProfileResponse.encoded_trace', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='op_profile', full_name='tensorflow.ProfileResponse.op_profile', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tool_data', full_name='tensorflow.ProfileResponse.tool_data', index=4,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=405,
  serialized_end=651,
)
# Resolve cross-message field references now that all descriptors exist.
_PROFILEREQUEST.fields_by_name['opts'].message_type = _PROFILEOPTIONS
_PROFILERESPONSE.fields_by_name['computation_graph'].message_type = tensorflow_dot_core_dot_framework_dot_graph__pb2._GRAPHDEF
_PROFILERESPONSE.fields_by_name['hlo_metadata'].message_type = tensorflow_dot_core_dot_protobuf_dot_config__pb2._RUNMETADATA
_PROFILERESPONSE.fields_by_name['op_profile'].message_type = tensorflow_dot_contrib_dot_tpu_dot_profiler_dot_op__profile__pb2._PROFILE
_PROFILERESPONSE.fields_by_name['tool_data'].message_type = _PROFILETOOLDATA
DESCRIPTOR.message_types_by_name['ProfileOptions'] = _PROFILEOPTIONS
DESCRIPTOR.message_types_by_name['ProfileRequest'] = _PROFILEREQUEST
DESCRIPTOR.message_types_by_name['ProfileToolData'] = _PROFILETOOLDATA
DESCRIPTOR.message_types_by_name['ProfileResponse'] = _PROFILERESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Materialize the concrete Python message classes from the descriptors and
# register them with the default symbol database.
ProfileOptions = _reflection.GeneratedProtocolMessageType('ProfileOptions', (_message.Message,), dict(
  DESCRIPTOR = _PROFILEOPTIONS,
  __module__ = 'tensorflow.contrib.tpu.profiler.tpu_profiler_pb2'
  # @@protoc_insertion_point(class_scope:tensorflow.ProfileOptions)
  ))
_sym_db.RegisterMessage(ProfileOptions)

ProfileRequest = _reflection.GeneratedProtocolMessageType('ProfileRequest', (_message.Message,), dict(
  DESCRIPTOR = _PROFILEREQUEST,
  __module__ = 'tensorflow.contrib.tpu.profiler.tpu_profiler_pb2'
  # @@protoc_insertion_point(class_scope:tensorflow.ProfileRequest)
  ))
_sym_db.RegisterMessage(ProfileRequest)

ProfileToolData = _reflection.GeneratedProtocolMessageType('ProfileToolData', (_message.Message,), dict(
  DESCRIPTOR = _PROFILETOOLDATA,
  __module__ = 'tensorflow.contrib.tpu.profiler.tpu_profiler_pb2'
  # @@protoc_insertion_point(class_scope:tensorflow.ProfileToolData)
  ))
_sym_db.RegisterMessage(ProfileToolData)

ProfileResponse = _reflection.GeneratedProtocolMessageType('ProfileResponse', (_message.Message,), dict(
  DESCRIPTOR = _PROFILERESPONSE,
  __module__ = 'tensorflow.contrib.tpu.profiler.tpu_profiler_pb2'
  # @@protoc_insertion_point(class_scope:tensorflow.ProfileResponse)
  ))
_sym_db.RegisterMessage(ProfileResponse)

# Descriptor for the TPUProfiler gRPC service and its single Profile RPC.
_TPUPROFILER = _descriptor.ServiceDescriptor(
  name='TPUProfiler',
  full_name='tensorflow.TPUProfiler',
  file=DESCRIPTOR,
  index=0,
  options=None,
  serialized_start=653,
  serialized_end=736,
  methods=[
  _descriptor.MethodDescriptor(
    name='Profile',
    full_name='tensorflow.TPUProfiler.Profile',
    index=0,
    containing_service=None,
    input_type=_PROFILEREQUEST,
    output_type=_PROFILERESPONSE,
    options=None,
  ),
])
_sym_db.RegisterServiceDescriptor(_TPUPROFILER)

DESCRIPTOR.services_by_name['TPUProfiler'] = _TPUPROFILER

# @@protoc_insertion_point(module_scope)
| [
"felix.park03@gmail.com"
] | felix.park03@gmail.com |
3c6d3a7309e480edacabe02dab04a8023d9c7a66 | 2979f5687b5d34b4885f41062b9b901eee217771 | /meiduo_mall/wc.py | 0dd52ff1ef1cfb39d00f4f75909f501865f38886 | [] | no_license | PierreCastor18/meiduo_mall | d9aa15fa4ec0957f079763a7eb7d2bea5c6aa765 | 8de99e6d232f24cdc8be947ccda8ed536597ec94 | refs/heads/master | 2020-04-17T03:08:30.405499 | 2019-02-25T07:27:11 | 2019-02-25T07:27:11 | 166,168,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | # _*_ coding: utf-8 _*_
__author__ = '其实很简单'
__date__ = '19-1-29 下午8:53'
# 类装饰器
class logger(object):
    """Class-based decorator that logs each call before delegating to the
    wrapped function.

    Usage::

        @logger
        def say(something): ...
    """

    def __init__(self, func):
        # Local import keeps the script's top-level untouched.
        import functools
        self.func = func
        # Fix: preserve the wrapped function's metadata (__name__, __doc__,
        # __module__, ...) so introspection and help() still see the original
        # function instead of the bare decorator instance.
        functools.update_wrapper(self, func)

    def __call__(self, *args, **kwargs):
        # Announce the call, then delegate unchanged to the wrapped function.
        print('[INFO]: the function {func}() is running...'.format(func=self.func.__name__))
        return self.func(*args, **kwargs)
@logger  # apply the logging decorator
def say(something):
    """Print a greeting built from *something*."""
    message = 'say {}!'.format(something)
    print(message)


say('hello')
| [
"xwp_fullstack@163.com"
] | xwp_fullstack@163.com |
8ac7a93c8efec3f919c47f1252986d33592d05af | a8769709aeb7299fa3757f0e7bba5c617eb8cfe3 | /lesson-3/k8s/lib/python2.7/site-packages/kubernetes/client/models/v1_volume_node_affinity.py | 1679a08b6ffd499a1bbd967aeb3fa5ae2910d3e8 | [
"Apache-2.0"
] | permissive | simox-83/workshop-k8s | 2ac5e8b282bb7c3337acc726a7d972717bf649cc | 04cb18e8b5925a3cfd84ca316952a6cb64960b31 | refs/heads/master | 2020-03-31T20:52:21.421995 | 2018-10-11T14:43:08 | 2018-10-11T14:43:08 | 152,558,678 | 0 | 0 | Apache-2.0 | 2018-10-11T08:37:20 | 2018-10-11T08:37:20 | null | UTF-8 | Python | false | false | 3,247 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.11.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1VolumeNodeAffinity(object):
    """Swagger-generated model for the Kubernetes ``V1VolumeNodeAffinity`` type.

    Attributes:
        swagger_types (dict): attribute name -> swagger type name.
        attribute_map (dict): attribute name -> JSON key in the API definition.
    """

    swagger_types = {
        'required': 'V1NodeSelector'
    }

    attribute_map = {
        'required': 'required'
    }

    def __init__(self, required=None):
        """Build a V1VolumeNodeAffinity; *required* is optional."""
        self._required = None
        self.discriminator = None
        if required is not None:
            self.required = required

    @property
    def required(self):
        """V1NodeSelector: hard node constraints that must be met."""
        return self._required

    @required.setter
    def required(self, required):
        """Set the hard node constraints that must be met."""
        self._required = required

    def to_dict(self):
        """Return the model's properties as a plain dict, recursing into
        nested models, lists and dicts."""
        result = {}
        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                result[name] = [
                    item.to_dict() if hasattr(item, 'to_dict') else item
                    for item in value
                ]
            elif hasattr(value, 'to_dict'):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {
                    key: val.to_dict() if hasattr(val, 'to_dict') else val
                    for key, val in value.items()
                }
            else:
                result[name] = value
        return result

    def to_str(self):
        """Return the pretty-printed string form of :meth:`to_dict`."""
        return pformat(self.to_dict())

    def __repr__(self):
        """Used by both ``print`` and ``pprint``."""
        return self.to_str()

    def __eq__(self, other):
        """Equal iff *other* is the same model type with equal state."""
        return isinstance(other, V1VolumeNodeAffinity) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of :meth:`__eq__`."""
        return not self == other
| [
"simone.dandreta@concur.com"
] | simone.dandreta@concur.com |
44924e9a5021295d2b49049fca70b2b8f8d2d91a | a0529a92f73c951bacfe69cc058c53394a9685d4 | /bingads/v12/bulk/entities/ad_extensions/bulk_action_ad_extensions.py | 2f884206ed01e4fa360a57370ec5cf9ec8ceac4a | [
"MIT"
] | permissive | joseftf/BingAds-Python-SDK | 06eda7d23e3141c9fcaee39a3424cf8317d472ed | 205ebf9bdd9701d5d05c5f9ac59702083754f553 | refs/heads/master | 2020-04-29T16:41:06.392989 | 2019-02-17T23:16:17 | 2019-02-17T23:16:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,825 | py | from bingads.v12.internal.bulk.mappings import _SimpleBulkMapping
from bingads.v12.internal.bulk.string_table import _StringTable
from bingads.service_client import _CAMPAIGN_OBJECT_FACTORY_V12
from .common import _BulkAdExtensionBase
from .common import _BulkAdGroupAdExtensionAssociation
from .common import _BulkCampaignAdExtensionAssociation
from .common import _BulkAccountAdExtensionAssociation
from bingads.v12.internal.extensions import *
# Resolve the concrete ActionAdExtension class from the v12 campaign-service
# object factory; used below for isinstance validation of wrapped extensions.
_ActionAdExtension = type(_CAMPAIGN_OBJECT_FACTORY_V12.create('ActionAdExtension'))
class BulkActionAdExtension(_BulkAdExtensionBase):
    """ Represents an action ad extension.

    This class exposes the :attr:`action_ad_extension` property that can be read and written
    as fields of the Action Ad Extension record in a bulk file.

    For more information, see Action Ad Extension at https://go.microsoft.com/fwlink/?linkid=846127.

    *See also:*

    * :class:`.BulkServiceManager`
    * :class:`.BulkOperation`
    * :class:`.BulkFileReader`
    * :class:`.BulkFileWriter`
    """

    def __init__(self, account_id=None, ad_extension=None):
        # NOTE(review): a falsy (but non-None) ad_extension skips this type
        # check — presumably only None or a real extension is ever passed;
        # confirm against callers.
        if ad_extension and not isinstance(ad_extension, _ActionAdExtension):
            raise ValueError('The type of ad_extension is: {0}, should be: {1}'.format(
                type(ad_extension),
                'ActionAdExtension'
            ))

        super(BulkActionAdExtension, self).__init__(
            account_id=account_id,
            ad_extension=ad_extension
        )

    @property
    def action_ad_extension(self):
        """ The action ad extension.

        see Action Ad Extension at https://go.microsoft.com/fwlink/?linkid=846127.
        """

        return self._ad_extension

    @action_ad_extension.setter
    def action_ad_extension(self, value):
        self._ad_extension = value

    # Column-by-column mapping between the bulk-file CSV row and the wrapped
    # ActionAdExtension object; each entry pairs a serializer (field_to_csv)
    # with a deserializer (csv_to_field) for one bulk-file header.
    _MAPPINGS = [
        _SimpleBulkMapping(
            header=_StringTable.ActionType,
            field_to_csv=lambda c: bulk_str(c.action_ad_extension.ActionType),
            csv_to_field=lambda c, v: setattr(c.action_ad_extension, 'ActionType', v)
        ),
        _SimpleBulkMapping(
            header=_StringTable.FinalUrl,
            field_to_csv=lambda c: field_to_csv_Urls(c.action_ad_extension.FinalUrls),
            csv_to_field=lambda c, v: csv_to_field_Urls(c.action_ad_extension.FinalUrls, v)
        ),
        _SimpleBulkMapping(
            header=_StringTable.FinalMobileUrl,
            field_to_csv=lambda c: field_to_csv_Urls(c.action_ad_extension.FinalMobileUrls),
            csv_to_field=lambda c, v: csv_to_field_Urls(c.action_ad_extension.FinalMobileUrls, v)
        ),
        _SimpleBulkMapping(
            header=_StringTable.TrackingTemplate,
            field_to_csv=lambda c: bulk_optional_str(c.action_ad_extension.TrackingUrlTemplate),
            csv_to_field=lambda c, v: setattr(c.action_ad_extension, 'TrackingUrlTemplate', v if v else '')
        ),
        _SimpleBulkMapping(
            header=_StringTable.Language,
            field_to_csv=lambda c: bulk_optional_str(c.action_ad_extension.Language),
            csv_to_field=lambda c, v: setattr(c.action_ad_extension, 'Language', v if v else '')
        ),
        _SimpleBulkMapping(
            header=_StringTable.CustomParameter,
            field_to_csv=lambda c: field_to_csv_UrlCustomParameters(c.action_ad_extension),
            csv_to_field=lambda c, v: csv_to_field_UrlCustomParameters(c.action_ad_extension, v)
        )
    ]

    def process_mappings_from_row_values(self, row_values):
        # Create a fresh suds object to deserialize the bulk row into.
        self.action_ad_extension = _CAMPAIGN_OBJECT_FACTORY_V12.create('ActionAdExtension')
        self.action_ad_extension.Type = 'ActionAdExtension'
        super(BulkActionAdExtension, self).process_mappings_from_row_values(row_values)
        row_values.convert_to_entity(self, BulkActionAdExtension._MAPPINGS)

    def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
        # Serialization requires a populated extension object.
        self._validate_property_not_null(self.action_ad_extension, 'action_ad_extension')
        super(BulkActionAdExtension, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
        self.convert_to_values(row_values, BulkActionAdExtension._MAPPINGS)
class BulkAccountActionAdExtension(_BulkAccountAdExtensionAssociation):
    """ Represents an account level action ad extension.

    This class exposes properties that can be read and written
    as fields of the Account Action Ad Extension record in a bulk file.

    For more information, see Account Action Ad Extension at https://go.microsoft.com/fwlink/?linkid=846127.

    *See also:*

    * :class:`.BulkServiceManager`
    * :class:`.BulkOperation`
    * :class:`.BulkFileReader`
    * :class:`.BulkFileWriter`
    """

    # All mapping behavior is inherited from the account-level association base.
    pass
class BulkCampaignActionAdExtension(_BulkCampaignAdExtensionAssociation):
    """ Represents a campaign level action ad extension.

    This class exposes properties that can be read and written
    as fields of the Campaign Action Ad Extension record in a bulk file.

    For more information, see Campaign Action Ad Extension at https://go.microsoft.com/fwlink/?linkid=846127.

    *See also:*

    * :class:`.BulkServiceManager`
    * :class:`.BulkOperation`
    * :class:`.BulkFileReader`
    * :class:`.BulkFileWriter`
    """

    # All mapping behavior is inherited from the campaign-level association base.
    pass
class BulkAdGroupActionAdExtension(_BulkAdGroupAdExtensionAssociation):
    """ Represents an ad group level action ad extension.

    This class exposes properties that can be read and written
    as fields of the Ad Group Action Ad Extension record in a bulk file.

    For more information, see Ad Group Action Ad Extension at https://go.microsoft.com/fwlink/?linkid=846127.

    *See also:*

    * :class:`.BulkServiceManager`
    * :class:`.BulkOperation`
    * :class:`.BulkFileReader`
    * :class:`.BulkFileWriter`
    """

    # All mapping behavior is inherited from the ad-group-level association base.
    pass
| [
"qitia@microsoft.com"
] | qitia@microsoft.com |
33be986bba891a0a1751bb25973dd5564f906a7a | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2850/60691/295113.py | 2dc388fb247c3a9fc4edd3c03a2977e028026556 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | def countone(l):
count = 0
for i in range(len(l)):
if l[i] == 1:
count += 1
return count
def convert(l, start, k):
for i in range(start, start+k):
if l[i] == 0:
l[i] = 1
else:
l[i] = 0
return countone(l)
def reverse(s, k):
    """For every window of length *k* in the digit string *s*, flip that
    window (0 <-> 1) and record the resulting total count of ones; return
    the maximum count over all window positions.

    Raises ValueError (max() of an empty sequence) when k > len(s),
    matching the original behavior.
    """
    # Fix: parse the digit string into ints ONCE instead of re-parsing the
    # whole string for every window position (was O(n^2) parsing work).
    base = [int(ch) for ch in s]
    counts = []
    for start in range(len(s) - k + 1):
        window = base[:]  # copy, because convert() mutates its argument
        counts.append(convert(window, start, k))
    return max(counts)
def countzero(l):
    """Return how many entries of *l* are the string '1'.

    NOTE(review): despite the name, this counts ones, not zeros — kept
    as-is because the caller below relies on exactly this behavior.
    """
    return l.count('1')
s = input().split(' ')
l = []
for i in range(len(s)):
l.append(s[i])
if countzero(l) == 0:
print(len(l))
else:
nums = []
for i in range(len(l)):
nums.append(reverse(''.join(l), i))
print(max(nums))
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
1f2a9f12b8e5de8db05890b9ce735d679dd4f163 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/binaryTree2_20200617161514.py | bafbac4ed0817fa72ac432e3542d2c469ac9291f | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,011 | py | # Create a node and assign a value to the node
# A tree node contains data then pointer to left child and pointer to right child
class Node:
def __init__(self,data):
# designate one node as root
self.data = data
# then the two others as child nodes
self.left = None
self.right = None
def inorder(root,newArr):
if root:
# Traverse left
inorder(root.left,newArr)
newArr.append(root.data)
inorder(root.right,newArr)
print(newArr)
return newArr
def morris_traversal(root):
# function for iterative inorder tree traversal
current = root
while current is not None:
# do the following
if current.left is None:
yield current.data
else:
# find the current in order
root = Node(1)
root.left = Node(2)
root.right = Node(3)
root.left.right = Node(4)
root.left.left = Node(7)
print(inorder(root,[]))
| [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
6938bb062f1e075b98f9af371123bfd4137ec234 | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/keyvault/azure-mgmt-keyvault/azure/mgmt/keyvault/v2021_06_01_preview/aio/_configuration.py | da6ab06618bea61e3ca4a9404926ddd3eacad877 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 3,336 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class KeyVaultManagementClientConfiguration(Configuration):
    """Configuration for KeyVaultManagementClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: Subscription credentials which uniquely identify Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
    :type subscription_id: str
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        **kwargs: Any
    ) -> None:
        # Fail fast on the two mandatory parameters.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        super(KeyVaultManagementClientConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        # API version pinned by the code generator for this service release.
        self.api_version = "2021-06-01-preview"
        # Default AAD scope for ARM; overridable via the credential_scopes kwarg.
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-keyvault/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        # Each pipeline policy may be replaced through the matching kwarg;
        # otherwise the standard azure-core / ARM default is installed.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Build a bearer-token policy from the credential only when the caller
        # did not supply an authentication policy explicitly.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| [
"noreply@github.com"
] | catchsrinivas.noreply@github.com |
c6386ec80ef31a7ea69b2b6b7ac82d1d28f0976f | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_12_01/operations/_public_ip_prefixes_operations.py | 9ea8a2ed3ce9e6c19bf905dc237500891e5a53ed | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 27,401 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union

    T = TypeVar('T')
    # Type of the optional custom-deserialization callback that every
    # operation accepts via the `cls` keyword argument.
    # NOTE(review): guard nesting reconstructed per the canonical autorest
    # layout — the dump stripped indentation; confirm against the original.
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class PublicIPPrefixesOperations(object):
"""PublicIPPrefixesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
    """Wire up the shared pipeline client, configuration and (de)serializers
    supplied by the generated service client."""
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
def _delete_initial(
    self,
    resource_group_name,  # type: str
    public_ip_prefix_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Issue the initial DELETE request for a public IP prefix; the LRO
    polling around it is handled by :meth:`begin_delete`."""
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map well-known HTTP failures to azure-core exception types; callers
    # may extend/override the map via the error_map kwarg.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    accept = "application/json"

    # Construct URL
    url = self._delete_initial.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200/202 indicate the delete was accepted; 204 means already gone.
    if response.status_code not in [200, 202, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'}  # type: ignore
def begin_delete(
    self,
    resource_group_name,  # type: str
    public_ip_prefix_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller[None]
    """Deletes the specified public IP prefix.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param public_ip_prefix_name: The name of the PublicIpPrefix.
    :type public_ip_prefix_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    # Only issue the initial DELETE when not resuming from a saved poller.
    if cont_token is None:
        raw_result = self._delete_initial(
            resource_group_name=resource_group_name,
            public_ip_prefix_name=public_ip_prefix_name,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were consumed by the initial call; drop them so the
    # polling machinery does not see stale values.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Delete returns no body; only invoke the custom callback if given.
        if cls:
            return cls(pipeline_response, None, {})

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # Select the polling strategy: ARM polling (final state via Location
    # header), no polling, or a caller-supplied polling method.
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'}  # type: ignore
def get(
    self,
    resource_group_name, # type: str
    public_ip_prefix_name, # type: str
    expand=None, # type: Optional[str]
    **kwargs # type: Any
):
    # type: (...) -> "_models.PublicIPPrefix"
    """Gets the specified public IP prefix in a specified resource group.
    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param public_ip_prefix_name: The name of the public IP prefix.
    :type public_ip_prefix_name: str
    :param expand: Expands referenced resources.
    :type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PublicIPPrefix, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2019_12_01.models.PublicIPPrefix
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
    # Caller-supplied error_map entries override these defaults.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    accept = "application/json"
    # Construct URL
    url = self.get.metadata['url'] # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {} # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    # $expand is only sent when explicitly requested.
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
    # Construct headers
    header_parameters = {} # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    # 200 is the only success status for this synchronous GET.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
def _create_or_update_initial(
    self,
    resource_group_name, # type: str
    public_ip_prefix_name, # type: str
    parameters, # type: "_models.PublicIPPrefix"
    **kwargs # type: Any
):
    # type: (...) -> "_models.PublicIPPrefix"
    # Issues the initial PUT of the create-or-update long-running operation
    # and returns the immediate 200/201 body; polling to completion is
    # handled by begin_create_or_update.
    cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"
    # Construct URL
    url = self._create_or_update_initial.metadata['url'] # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {} # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = {} # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    body_content_kwargs = {} # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'PublicIPPrefix')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    # 200 = updated existing resource, 201 = created new resource.
    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    if response.status_code == 200:
        deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
    if response.status_code == 201:
        deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
def begin_create_or_update(
    self,
    resource_group_name, # type: str
    public_ip_prefix_name, # type: str
    parameters, # type: "_models.PublicIPPrefix"
    **kwargs # type: Any
):
    # type: (...) -> LROPoller["_models.PublicIPPrefix"]
    """Creates or updates a static or dynamic public IP prefix.
    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param public_ip_prefix_name: The name of the public IP prefix.
    :type public_ip_prefix_name: str
    :param parameters: Parameters supplied to the create or update public IP prefix operation.
    :type parameters: ~azure.mgmt.network.v2019_12_01.models.PublicIPPrefix
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either PublicIPPrefix or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.PublicIPPrefix]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Pull long-running-operation options out of kwargs before the initial
    # request; everything left in kwargs flows to the HTTP pipeline.
    polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
    # With a continuation token the initial PUT was already issued in a
    # previous call, so it is skipped here.
    if cont_token is None:
        raw_result = self._create_or_update_initial(
            resource_group_name=resource_group_name,
            public_ip_prefix_name=public_ip_prefix_name,
            parameters=parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # These were consumed by the initial request; drop them so they are not
    # replayed on every poll.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)
    def get_long_running_output(pipeline_response):
        # Deserialize the final response body into the model (or hand it to
        # the caller-supplied `cls` hook).
        deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    # 'final-state-via: location' — the poller follows the Location header to
    # fetch the final resource state.
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
def update_tags(
    self,
    resource_group_name, # type: str
    public_ip_prefix_name, # type: str
    parameters, # type: "_models.TagsObject"
    **kwargs # type: Any
):
    # type: (...) -> "_models.PublicIPPrefix"
    """Updates public IP prefix tags.
    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param public_ip_prefix_name: The name of the public IP prefix.
    :type public_ip_prefix_name: str
    :param parameters: Parameters supplied to update public IP prefix tags.
    :type parameters: ~azure.mgmt.network.v2019_12_01.models.TagsObject
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PublicIPPrefix, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2019_12_01.models.PublicIPPrefix
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefix"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"
    # Construct URL
    url = self.update_tags.metadata['url'] # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'publicIpPrefixName': self._serialize.url("public_ip_prefix_name", public_ip_prefix_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {} # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = {} # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    body_content_kwargs = {} # type: Dict[str, Any]
    # Synchronous PATCH carrying only the TagsObject body; the rest of the
    # resource is left untouched.
    body_content = self._serialize.body(parameters, 'TagsObject')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    deserialized = self._deserialize('PublicIPPrefix', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes/{publicIpPrefixName}'} # type: ignore
def list_all(
    self,
    **kwargs # type: Any
):
    # type: (...) -> Iterable["_models.PublicIPPrefixListResult"]
    """Gets all the public IP prefixes in a subscription.
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either PublicIPPrefixListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.PublicIPPrefixListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefixListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    accept = "application/json"
    def prepare_request(next_link=None):
        # Builds the GET for the first page (templated URL) or a follow-up
        # page (opaque nextLink URL returned by the service).
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_all.metadata['url'] # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {} # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # nextLink already embeds its query string; send it as-is.
            url = next_link
            query_parameters = {} # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request
    def extract_data(pipeline_response):
        # Returns (next page link or None, iterator over this page's items).
        deserialized = self._deserialize('PublicIPPrefixListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)
    def get_next(next_link=None):
        # Fetches one page and validates the HTTP status.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response
    # Lazily pages through results; each advance issues one GET.
    return ItemPaged(
        get_next, extract_data
    )
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPPrefixes'} # type: ignore
def list(
    self,
    resource_group_name, # type: str
    **kwargs # type: Any
):
    # type: (...) -> Iterable["_models.PublicIPPrefixListResult"]
    """Gets all public IP prefixes in a resource group.
    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either PublicIPPrefixListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.PublicIPPrefixListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPPrefixListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    accept = "application/json"
    def prepare_request(next_link=None):
        # Builds the GET for the first page (templated URL) or a follow-up
        # page (opaque nextLink URL returned by the service).
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list.metadata['url'] # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {} # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # nextLink already embeds its query string; send it as-is.
            url = next_link
            query_parameters = {} # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request
    def extract_data(pipeline_response):
        # Returns (next page link or None, iterator over this page's items).
        deserialized = self._deserialize('PublicIPPrefixListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)
    def get_next(next_link=None):
        # Fetches one page and validates the HTTP status.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response
    # Lazily pages through results; each advance issues one GET.
    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPPrefixes'} # type: ignore
| [
"noreply@github.com"
] | catchsrinivas.noreply@github.com |
07fadb59835412186c216ed94671aaf72d7c9b8e | e8b291e05b200832b2f3e1a94ab66c225883cc98 | /authentication/serializers.py | f791f7b68bca3c452d3326955e4c4c4f61d04c45 | [] | no_license | Ramesh7128/emailwatch | ffaa22cd712dede2b68b10c3cfd58675a2c9e379 | bf150d1587c7874019f753da5d19dfd2636fb540 | refs/heads/master | 2023-01-09T10:23:15.354834 | 2019-04-24T11:40:40 | 2019-04-24T11:40:40 | 180,546,082 | 0 | 0 | null | 2023-01-04T15:08:03 | 2019-04-10T09:16:03 | JavaScript | UTF-8 | Python | false | false | 3,505 | py | from rest_framework import serializers
from authentication.models import User
from django.contrib.auth import authenticate
class SocialRegisterationLoginSerializer(serializers.Serializer):
    """
    Serializer for registering/logging in a user via a social OAuth provider.

    ``access_token`` and ``refresh_token`` are write-only inputs; the JWT
    ``token`` and ``username`` are read-only outputs returned to the client.
    """
    access_token = serializers.CharField(
        allow_blank=False,
        trim_whitespace=True,
        write_only=True
    )
    refresh_token = serializers.CharField(
        allow_blank=False,
        trim_whitespace=True,
        write_only=True
    )
    email = serializers.CharField()
    token = serializers.CharField(allow_blank=True, read_only=True)
    username = serializers.CharField(allow_blank=True, read_only=True)

    def create(self, validated_data):
        """Create (or fetch) the user for the validated social credentials."""
        # Removed debug `print(validated_data, ...)`: validated_data contains
        # the OAuth access/refresh tokens, which must not be written to stdout.
        # NOTE(review): the manager method is spelled `creat_social_user`;
        # presumably this matches the custom manager — confirm it is not a typo.
        return User.objects.creat_social_user(**validated_data)
class RegisterationSerializer(serializers.ModelSerializer):
    """
    Serializer for registeration request and create a new user.
    """
    # Password is accepted on input only and never echoed back.
    password = serializers.CharField(
        max_length=200,
        min_length=8,
        write_only=True
    )
    # JWT issued for the newly created account; output only.
    token = serializers.CharField(max_length=200, read_only=True)
    class Meta:
        model = User
        fields = ['email', 'username', 'password', 'token']
    def create(self, validated_data):
        """Create a new user via the custom manager (hashes the password)."""
        # use the create user method we wrote earlier to create a new_user.
        return User.objects.create_user(**validated_data)
class LoginSerializer(serializers.Serializer):
    """Validates email/password credentials and returns the user's JWT."""
    email = serializers.CharField(max_length=255)
    username = serializers.CharField(max_length=255, read_only=True)
    password = serializers.CharField(max_length=128, write_only=True)
    token = serializers.CharField(max_length=255, read_only=True)
    def validate(self, data):
        """Authenticate the credentials; return email/username/token on success.

        :raises serializers.ValidationError: if either field is missing, the
            credentials do not match, or the account is deactivated.
        """
        # The 'validate' method is where we make sure that the user.
        # the validate method is where we make sure that the user is a valid user.
        email = data.get('email', None)
        password = data.get('password', None)
        if email is None:
            raise serializers.ValidationError(
                'Email field is required to log in')
        if password is None:
            raise serializers.ValidationError(
                'password field is required to log in')
        # NOTE(review): email is passed as `username` — presumably the custom
        # auth backend authenticates by email; confirm against USERNAME_FIELD.
        user = authenticate(username=email, password=password)
        if user is None:
            raise serializers.ValidationError(
                'User credentials not matching')
        if not user.is_active:
            raise serializers.ValidationError('User has been deactivated')
        return {
            'email': user.email,
            'username': user.username,
            'token': user.token
        }
class UserSerializer(serializers.ModelSerializer):
    """
    Handles serialization and deserialization of user objects.
    """
    password = serializers.CharField(
        max_length=128, min_length=8, write_only=True)
    class Meta:
        model = User
        fields = ['username', 'email', 'password']
        # read_only_fields = ('token',)
    def update(self, instance, validated_data):
        """
        performs an update on User instance.
        """
        # Password is handled separately below so it is hashed via
        # set_password rather than stored as plain text by setattr.
        password = validated_data.pop('password', None)
        for (key, value) in validated_data.items():
            setattr(instance, key, value)
        if password is not None:
            instance.set_password(password)
        instance.save()
        return instance
| [
"ramesh7128@gmail.com"
] | ramesh7128@gmail.com |
6838f5d83e2e305daf8e7e1d283c3de9d6b2e773 | 69698589b1962608cb0fa6099aeafd8eb263ddc4 | /降维/PCA降维/利用PCA对半导体制造数据降维.py | 955d949e1f9fee3aaf7eb119ffd801bcb70b0f37 | [] | no_license | haibiyu/Machine-Learning-Action | de7e18aeb31c48340566d1ab4550d4864094b409 | 3ff16b6881393c1230f5c06dba632651e5d444e0 | refs/heads/master | 2022-07-01T13:47:15.923282 | 2020-05-11T09:49:44 | 2020-05-11T09:49:44 | 259,905,572 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,464 | py | # !/usr/bin/env python
# -*-coding:utf-8 -*-
"""
# File : 利用PCA对半导体制造数据降维.py
# Time :2020/3/14 21:45
# Author :haibiyu
# version :python 3.6
# Description:
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
matplotlib.rcParams['font.sans-serif'] = [u'SimHei']
matplotlib.rcParams['axes.unicode_minus'] = False
def load_data_set(file_name, delim='\t'):
    """
    Load a delimited numeric text file into a matrix.

    :param file_name: path of the text file; every line holds numbers
        separated by ``delim``
    :param delim: field separator (default: tab)
    :return: np.matrix of floats, one row per input line
    """
    # `with` guarantees the file handle is closed (the original leaked it).
    with open(file_name) as fr:
        string_arr = [line.strip().split(delim) for line in fr]
    data_arr = [np.array(fields).astype(float) for fields in string_arr]
    return np.mat(data_arr)
def replace_nan_with_mean(file_name='./半导体制造数据/secom.data', delim=' '):
    """
    Load a data set and replace every NaN with its column mean.

    Generalized: the data file and delimiter are now parameters; calling
    with no arguments behaves exactly as before (secom data set).

    :param file_name: path of the delimited numeric text file
    :param delim: field separator (default: space)
    :return: np.matrix with NaNs imputed column-wise
    """
    data_mat = load_data_set(file_name, delim)
    num_feat = data_mat.shape[1]
    for i in range(num_feat):
        col = data_mat[:, i].A
        # Mean over the non-NaN entries of this column.
        # NOTE(review): an all-NaN column would yield a NaN mean here —
        # presumably that never occurs in the secom data; confirm.
        mean_val = np.mean(data_mat[np.nonzero(~np.isnan(col))[0], i])
        # Fill the NaN entries with that mean.
        data_mat[np.nonzero(np.isnan(col))[0], i] = mean_val
    return data_mat
def pca(data_mat, variance_ratio=0.99):
    """
    Reduce the data with PCA; return the reduced data and the reconstruction.

    :param data_mat: original data, an m*n matrix (rows = samples)
    :param variance_ratio: minimum fraction of total variance the kept
        components must explain (default 0.99)
    :return: (reduced data matrix m*k, reconstructed data matrix m*n)
    """
    mean_vals = np.mean(data_mat, axis=0)
    mean_removed = (data_mat - mean_vals)  # center the data (zero mean)
    cov_mat = np.cov(mean_removed, rowvar=0)  # covariance matrix, n*n
    # SVD is used here in place of an eigendecomposition of the covariance.
    U, S, V = np.linalg.svd(cov_mat)  # U (n*n), singular values S already sorted descending
    # Smallest number of components that retains `variance_ratio` of the variance.
    top_k_feat = get_top_k_feat(S, variance_ratio)
    print("降维后保留方差{}的最小维度数为:{}".format(variance_ratio,top_k_feat))
    plot_top_variance_ratio(S, top_k_feat)
    red_vects = U[:, :top_k_feat]  # first top_k_feat principal directions
    red_data_mat = mean_removed * red_vects  # project data into the reduced space
    recon_mat = red_data_mat * red_vects.T + mean_vals  # reconstruct back to original space
    return red_data_mat, recon_mat
def get_top_k_feat(eig_values, variance_ratio=0.99):
    """
    Return the smallest number of leading components whose cumulative
    variance reaches ``variance_ratio`` of the total.

    :param eig_values: eigenvalues/singular values sorted in descending order
    :param variance_ratio: minimum cumulative-variance fraction to retain
    :return: component count in [0, len(eig_values)]; 0 for empty/zero input
    """
    total = float(np.sum(eig_values))
    # Guard: the original divided by zero on empty input and implicitly
    # returned None when the threshold was never reached.
    if len(eig_values) == 0 or total == 0:
        return 0
    running = 0.0
    for i, val in enumerate(eig_values):
        running += float(val)
        if running / total >= variance_ratio:
            return i + 1
    # Floating-point rounding can leave the ratio fractionally below the
    # threshold even after summing everything; keep all components then.
    return len(eig_values)
def plot_top_variance_ratio(eigvalues,k):
    """
    Plot the percentage of total variance carried by each of the first
    k principal components.

    :param eigvalues: eigenvalues/singular values, sorted descending
    :param k: number of leading components to show
    """
    # Each point is that component's share of the total variance, in percent.
    plt.plot(np.arange(1, k+1), eigvalues[:k] / np.sum(eigvalues) * 100,'o-')
    plt.xlabel("主成分数目")
    plt.ylabel("方差的百分比")
    plt.xlim(0, k)
    plt.ylim(0,)
    plt.title("前{}个主成分占总方差的百分比".format(k))
    plt.show()
if __name__ == '__main__':
    # Load the secom data and impute NaNs with column means.
    data_mat = replace_nan_with_mean()
    # Run PCA keeping 99% of the variance; get reduced + reconstructed data.
    red_data_mat, recon_mat = pca(data_mat,0.99)
| [
"haibiyu@163.com"
] | haibiyu@163.com |
def calc_sum_digits(n):
    """Return the sum of the decimal digits of the non-negative integer n."""
    total = 0
    while n:
        n, digit = divmod(n, 10)
        total += digit
    return total
# Read N, A, B and sum every n in [1, N] whose digit sum lies in [A, B].
N, A, B = map(int, input().split())
result = sum(n for n in range(1, N + 1) if A <= calc_sum_digits(n) <= B)
print(result)
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
6c9ffa106c3d482bebc5f45b5ab810671a452d45 | 70fec09ceb625608d561937955c285c0c39f6d95 | /tomodachi/helpers/logging.py | 6f70f688795efb29e251f11cf8b1df6ce6a5f8f4 | [
"MIT"
] | permissive | kalaspuff/tomodachi | b285e2c73696d14e3c84a479745e00824fba7190 | deca849ec2b4cdc3d27f06e9ce0056fac0146a1a | refs/heads/master | 2023-08-31T00:32:12.042486 | 2023-08-21T13:02:24 | 2023-08-21T13:02:24 | 62,165,703 | 191 | 28 | MIT | 2023-09-11T23:32:51 | 2016-06-28T18:43:51 | Python | UTF-8 | Python | false | false | 1,787 | py | from typing import Any
from tomodachi import context, logging
def log(service: Any, *args: Any, **kwargs: Any) -> None:
    """Flexible logging helper accepting several positional call shapes.

    Supported positional forms:
      * log(service, message)
      * log(service, level, message)      -- level as int, or a level-name str
      * log(service, name, message)       -- any other single str first arg
      * log(service, name, level, message)

    Keyword overrides: ``level``/``lvl`` (int or level-name str), ``name``,
    ``message``/``msg``. Falls back to the context logger name and to INFO
    when no (truthy) level is given — note this means an explicit NOTSET (0)
    is also replaced by INFO.

    NOTE(review): ``service`` itself is never read here — presumably kept so
    the helper can be bound as a service method; confirm.
    """
    # Default logger name comes from the ambient service context.
    name: str = context("service.logger") or ""
    level = None
    message = None
    if len(args) == 1:
        message = args[0]
    if len(args) == 2:
        # Disambiguate (level, message) vs (name, message) by the first arg.
        if type(args[0]) is int:
            level = args[0]
        elif type(args[0]) is str and str(args[0]).upper() in (
            "NOTSET",
            "DEBUG",
            "INFO",
            "WARN",
            "WARNING",
            "ERROR",
            "FATAL",
            "CRITICAL",
        ):
            level = getattr(logging, str(args[0]).upper())
        else:
            name = args[0]
        message = args[1]
    if len(args) == 3:
        name = args[0]
        level = int(args[1]) if type(args[1]) is int else getattr(logging, str(args[1]).upper())
        message = args[2]
    # Keyword forms win over positionals parsed above.
    if "level" in kwargs:
        level = 0
        level_ = kwargs.pop("level", 0)
        if type(level_) is int:
            level = int(level_)
        else:
            level = int(getattr(logging, str(level_).upper()))
    if "lvl" in kwargs:
        level = 0
        level_ = kwargs.pop("lvl", 0)
        if type(level_) is int:
            level = int(level_)
        else:
            level = int(getattr(logging, str(level_).upper()))
    if "name" in kwargs:
        name = kwargs.pop("name", None) or ""
    if not message and "message" in kwargs:
        message = kwargs.pop("message", None)
    if not message and "msg" in kwargs:
        message = kwargs.pop("msg", None)
    # Fallbacks: INFO level, context logger name, empty message.
    if not level:
        level = logging.INFO
    if not name:
        name = context("service.logger")
    if not message:
        message = ""
    # Remaining kwargs are forwarded to the underlying logger call.
    logging.getLogger(name or None).log(level, message, **kwargs)
| [
"hello@carloscar.com"
] | hello@carloscar.com |
def solution(s):
    """Return "<min> <max>" for the space-separated integers in s."""
    nums = [int(tok) for tok in s.split()]
    return ' '.join(map(str, (min(nums), max(nums))))
"gf265@naver.com"
] | gf265@naver.com |
2d3beeff7f88a9ebdfa69239ba98dea009416491 | 49c174fa2363461bbefd07af08f2d62b2d12b591 | /robots/LoCoBot/locobot_calibration/scripts/artag_camera.py | 84c6517cf003f248e2a81c48df3d658a9cf2e2d7 | [
"MIT"
] | permissive | Improbable-AI/pyrobot | 452f68ca503fb4aff247d6166cff8914471fa9d8 | 326b49057421ae7d5feefdca93b580846aaef730 | refs/heads/master | 2022-07-07T20:58:23.358744 | 2022-04-01T22:02:39 | 2022-04-01T22:02:39 | 221,547,610 | 1 | 2 | MIT | 2022-04-04T20:24:45 | 2019-11-13T20:41:11 | Python | UTF-8 | Python | false | false | 895 | py | # Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import threading
import rospy
import copy
from ar_track_alvar_msgs.msg import AlvarMarkers
ROSTOPIC_AR_POSE_MARKER = '/ar_pose_marker'
class ARTagCamera(object):
    """Caches the latest AR tag pose published on the alvar marker topic.

    The ROS subscriber updates the cached message from a background thread,
    so every access to it is guarded by a reentrant lock.
    """

    def __init__(self, configs):
        """Subscribe to the AR marker topic.

        :param configs: accepted for interface compatibility; not read here
            (NOTE(review): confirm callers do not expect it to be stored).
        """
        self.ar_tag_pose = None
        self.ar_tag_lock = threading.RLock()
        rospy.Subscriber(
            ROSTOPIC_AR_POSE_MARKER,
            AlvarMarkers,
            self.alvar_callback)

    def alvar_callback(self, msg):
        """Subscriber callback: store the newest AlvarMarkers message."""
        # `with` releases the lock even if an exception is raised, unlike the
        # manual acquire()/release() pair it replaces.
        with self.ar_tag_lock:
            self.ar_tag_pose = msg

    def get_ar_tag_pose(self):
        """Return a deep copy of the most recent pose message (or None)."""
        with self.ar_tag_lock:
            return copy.deepcopy(self.ar_tag_pose)
| [
"kalyan051993@gmail.com"
] | kalyan051993@gmail.com |
29c085f9787e37d6f79717df659b17a72d8ec18d | 13b5372316dd8a47c7dfe9abf43839f4bc61ba9d | /mysite/settings.py | ec1a2a6092dbf30e5285c63b399ab1e61cf1bc62 | [] | no_license | YaCpotato/Django-RFID-register-API | 6b7db5a07ca0ac182645ac47436aed37006e7ac3 | a36e680ccdf8f80d5d9c21c5ab6d5cae0547c74a | refs/heads/master | 2020-08-05T18:00:56.197117 | 2019-10-07T16:34:04 | 2019-10-07T16:34:04 | 212,646,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,221 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.0.13.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# from the environment before any production deployment.
SECRET_KEY = 'p-$50yl%^e%k4uf^01+2z4l^q2kmud8++8kvohc*n6e82!(_07'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['127.0.0.1', '.pythonanywhere.com','localhost']

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'logsys',
    'rest_framework',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'mysite.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'mysite.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'ja'
TIME_ZONE = 'Asia/Tokyo'
USE_TZ = True

# Custom user model living in the logsys app.
AUTH_USER_MODEL = "logsys.User"

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| [
"yasshisshy@gmail.com"
] | yasshisshy@gmail.com |
1703fd234063fecf90463ba8e557062e7b98db89 | a560ad8d5f523b720b47f0be27c2fdba232a3a4b | /src/configuration/config_p.py | f44293b7d749ac53b0ed151d187db84a655dd68f | [
"MIT"
] | permissive | vollov/py-lab | a27cb422e5a4ac44d4364c89e98202207cd2a1d5 | 0a1a3c93c5decaa5246fab981bcc2563cc42c6d0 | refs/heads/master | 2021-06-01T13:25:51.829046 | 2021-01-23T16:39:08 | 2021-01-23T16:39:08 | 33,277,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | import ConfigParser,ast,os
class configp():
def __init__(self):
current_directory = os.path.dirname(os.path.abspath(__file__))
self._config_file_path = os.path.join(current_directory, 'conf.ini')
self.config = ConfigParser.ConfigParser()
self.config.read(self._config_file_path)
def get(self, section,option):
return self.config.get(section, option)
def test():
con = configp()
print con.get('My Section','foodir')
if __name__=='__main__':test() | [
"dike.zhang@gmail.com"
] | dike.zhang@gmail.com |
9db0cafb8a56ca93e6c9a4097abf460afe3f71e3 | 90419da201cd4948a27d3612f0b482c68026c96f | /sdk/python/pulumi_azure_nextgen/authorization/v20150701/_inputs.py | 3caabfd0cd01b9038f1146d2ea779d3cf6007169 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | test-wiz-sec/pulumi-azure-nextgen | cd4bee5d70cb0d332c04f16bb54e17d016d2adaf | 20a695af0d020b34b0f1c336e1b69702755174cc | refs/heads/master | 2023-06-08T02:35:52.639773 | 2020-11-06T22:39:06 | 2020-11-06T22:39:06 | 312,993,761 | 0 | 0 | Apache-2.0 | 2023-06-02T06:47:28 | 2020-11-15T09:04:00 | null | UTF-8 | Python | false | false | 3,337 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'PermissionArgs',
'RoleAssignmentPropertiesArgs',
]
@pulumi.input_type
class PermissionArgs:
def __init__(__self__, *,
actions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
not_actions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Role definition permissions.
:param pulumi.Input[Sequence[pulumi.Input[str]]] actions: Allowed actions.
:param pulumi.Input[Sequence[pulumi.Input[str]]] not_actions: Denied actions.
"""
if actions is not None:
pulumi.set(__self__, "actions", actions)
if not_actions is not None:
pulumi.set(__self__, "not_actions", not_actions)
@property
@pulumi.getter
def actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Allowed actions.
"""
return pulumi.get(self, "actions")
@actions.setter
def actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "actions", value)
@property
@pulumi.getter(name="notActions")
def not_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Denied actions.
"""
return pulumi.get(self, "not_actions")
@not_actions.setter
def not_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "not_actions", value)
@pulumi.input_type
class RoleAssignmentPropertiesArgs:
def __init__(__self__, *,
principal_id: pulumi.Input[str],
role_definition_id: pulumi.Input[str]):
"""
Role assignment properties.
:param pulumi.Input[str] principal_id: The principal ID assigned to the role. This maps to the ID inside the Active Directory. It can point to a user, service principal, or security group.
:param pulumi.Input[str] role_definition_id: The role definition ID used in the role assignment.
"""
pulumi.set(__self__, "principal_id", principal_id)
pulumi.set(__self__, "role_definition_id", role_definition_id)
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> pulumi.Input[str]:
"""
The principal ID assigned to the role. This maps to the ID inside the Active Directory. It can point to a user, service principal, or security group.
"""
return pulumi.get(self, "principal_id")
@principal_id.setter
def principal_id(self, value: pulumi.Input[str]):
pulumi.set(self, "principal_id", value)
@property
@pulumi.getter(name="roleDefinitionId")
def role_definition_id(self) -> pulumi.Input[str]:
"""
The role definition ID used in the role assignment.
"""
return pulumi.get(self, "role_definition_id")
@role_definition_id.setter
def role_definition_id(self, value: pulumi.Input[str]):
pulumi.set(self, "role_definition_id", value)
| [
"public@paulstack.co.uk"
] | public@paulstack.co.uk |
92248a35db9e68515e4e085e21c1b7010262f4c0 | 7daab7f2e91d62ba0383fa050f3dea1dc9752975 | /iniciante/1066_pares_impares_positivos_e_negativos.py | 8b84148a9a7da0955c6dade7d456691e2be198db | [] | no_license | luandadantas/URI-Python | 97ccdaa3835b2d2fa403f148969ca7e893d3f119 | 2cb67f39725b20e6fcbbeaf27d04c4ba05dba665 | refs/heads/master | 2022-12-04T02:51:14.374361 | 2020-08-14T17:59:58 | 2020-08-14T17:59:58 | 255,736,816 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 600 | py | valor1 = int(input())
valor2 = int(input())
valor3 = int(input())
valor4 = int(input())
valor5 = int(input())
positivos = 0
negativos = 0
pares = 0
impares = 0
for valor in [valor1, valor2, valor3, valor4, valor5]:
if valor > 0:
positivos += 1
if valor < 0:
negativos += 1
if valor % 2 == 0:
pares += 1
if valor % 2 == 1:
impares += 1
print("{} valor(es) par(es)".format(pares))
print("{} valor(es) impar(es)".format(impares))
print("{} valor(es) positivo(s)".format(positivos))
print("{} valor(es) negativo(s)".format(negativos)) | [
"ludanttas@gmail.com"
] | ludanttas@gmail.com |
862d8a3ca39f798075f14ad549a35b883b69cf4e | fd1a6a8c27f3f7d91a1fa4a4914181f8ae0fd795 | /易中标js破解(中低)/yibiaoparse.py | 4b09382fa0e618bbb5b3f816d47e55860309f0a7 | [] | no_license | heyanglin/js- | 078fdaa7892dbe8a94d2965e3fd700c205e7a8ee | 8c0b36b2df9c942f0c590c21e6696ab75de2a3a0 | refs/heads/master | 2020-07-05T14:24:56.565195 | 2019-12-19T04:26:51 | 2019-12-19T04:26:51 | 202,672,163 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,336 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
# author:Administrator
# datetime:2019/6/19 10:57
# software: PyCharm
import execjs
import requests
import json
import os,io
os.environ["EXECJS_RUNTIME"] = "Node"
#生成js运行文件
with open('2.js','r') as f:
jss = f.read()
# print(jss)
fun = execjs.compile(jss)
page= """{"typeNum":0,"limit":20,"start":16,"title":""}"""
# 得到salt,data的值
salt = fun.call('r')
data = fun.call('o',page,salt)
post_data={
'salt':salt,
'data':data,
}
print(post_data)
#开始请求
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36',
'Content-Type': 'application/x-www-form-urlencoded',
# 'Authorization': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyTm8iOiJ0ZXN0X3BlcnNvbjIiLCJleHAiOjE1NjI0MDY1NTEsImlhdCI6MTU2MjMyMDE1MX0.AF9mJnJjMOhoIIiXXOXHhrlGuH0T6cgF4EwUan6W49s',
'Origin': 'http://192.168.1.33:8888',
'Referer': 'http://192.168.1.33:8888/',
}
url = 'http://www.ebidwin.cn/ow/bidInfo/bidResultList'
resp = requests.post(url,headers=headers,data=post_data)
# print(resp)
jsons = json.loads(resp.text)
res_salt = jsons['data']['salt']
res_data = jsons['data']['data']
#解密
result = fun.call('a',res_data,res_salt)
# print(requests.utils.unquote(result))
print(result)
| [
"someone@someplace.com"
] | someone@someplace.com |
903350f5c6ddd7650a4bd924a792990d0418aa25 | 7c06ff01f631cac78aa7c47f2af8237d140eab72 | /maa/atlas/export_predictor_website.py | 1324995579209c7185c1d193b17d4575ecfc0531 | [
"MIT"
] | permissive | iosonofabio/maa | 563df2e61ee23b128e20c60740761a0a75706ea4 | 072892e1cb7b8e48e9ffc335d57d508bf1362f7e | refs/heads/master | 2020-03-30T15:06:15.457548 | 2018-10-02T23:51:27 | 2018-10-02T23:51:27 | 151,348,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,780 | py | # vim: fdm=indent
'''
author: Fabio Zanini
date: 22/11/17
content: Merge scattering and antibody-based predictors.
'''
# Modules
import os
import sys
import argparse
import yaml
import numpy as np
import pandas as pd
from scipy.stats import spearmanr
from sklearn.externals import joblib
import json
import matplotlib.pyplot as plt
import seaborn as sns
# Functions
class CombinedClassifier:
def __init__(self, classifiers, logic='AND'):
self.classifiers = classifiers
self.logic = logic
def predict(self, Xs):
y = (self.classifiers[0].predict(Xs[0]) > 0)
if len(self.classifiers) > 1:
if self.logic == 'AND':
for clf, X in zip(self.classifiers[1:], Xs[1:]):
y &= (clf.predict(X) > 0)
elif self.logic == 'OR':
for clf, X in zip(self.classifiers[1:], Xs[1:]):
y |= (clf.predict(X) > 0)
else:
raise ValueError('Combination logic not understood: {}'.format(self.logic))
return (y * 2) - 1
def parse_biolegend():
fn = '../../data/ab_vendors/Biolegend.tsv'
df = pd.read_csv(fn, sep='\t')
if 'GeneName' not in df.columns:
df.to_csv(fn+'.bak', sep='\t', index=False)
from collections import Counter
fn_conv = '../../data/ab_vendors/Biolegend_markers_conversion.tsv'
df_conv = pd.read_csv(fn_conv, sep='\t')
n_entries = Counter(df_conv['Input'].values)
multiples = [k for (k, v) in n_entries.items() if v > 1]
if len(multiples):
print('Multiple entries:', multiples)
raise ValueError('Some antibody target names have multiple entries')
df_conv.set_index('Input', inplace=True)
df['GeneName'] = ''
newcols = df.columns[:2].tolist() + ['GeneName'] + df.columns[2:].tolist()
df = df.loc[:, newcols]
for k, datum in df.iterrows():
if datum['Specificity'] in df_conv.index:
df.loc[k, 'GeneName'] = df_conv.loc[datum['Specificity'], 'Symbol']
df.to_csv(fn, sep='\t', index=False)
print('New file saved to file')
df.iloc[:, 3:] = (df.iloc[:, 3:] == '•')
return df
def plot_classifier_antibody(X, y, clf, ax=None):
'''Plot SVM classifier'''
if ax is None:
fig, ax = plt.subplots()
colors = plt.cm.Paired([0.0, 1.0])
colors[0, -1] = 0.4
colors[1, -1] = 0.7
c = np.zeros((len(y), 4))
c[y == 0] = colors[0]
c[y == 1] = colors[1]
if X.shape[1] == 1:
from scipy.optimize import minimize_scalar
def fun(x, offset):
return (clf.decision_function([[x]])[0] - offset)**2
discr = minimize_scalar(fun, args=(0,), bounds=[0, 6]).x
dis_low = minimize_scalar(fun, args=(-0.5,), bounds=[0, 6]).x
dis_high = minimize_scalar(fun, args=(+0.5,), bounds=[0, 6]).x
df = pd.DataFrame([X[:, 0], y], index=['x', 'identity']).T
sns.swarmplot(
x='x', y='identity', data=df, ax=ax,
orient='h',
alpha=0.7,
)
ax.axvline(discr)
ax.axvline(dis_low, ls='--')
ax.axvline(dis_high, ls='--')
else:
ax.scatter(
X[:, 0], X[:, 1],
color=c,
zorder=10,
s=20)
x_min = X[:, 0].min()
x_max = X[:, 0].max()
y_min = X[:, 1].min()
y_max = X[:, 1].max()
XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])
# Put the result into a color plot
Z = Z.reshape(XX.shape)
ax.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired, alpha=0.05)
ax.contour(XX, YY, Z, colors=['k', 'k', 'k'],
linestyles=['--', '-', '--'], levels=[-.5, 0, .5])
return ax
def plot_classifier_scattering(X, y, clf, ax=None):
'''Plot SVM classifier'''
if ax is None:
fig, ax = plt.subplots()
colors = plt.cm.Paired([0.0, 1.0])
colors[0, -1] = 0.4
colors[1, -1] = 0.7
c = np.zeros((len(y), 4))
c[y == 0] = colors[0]
c[y == 1] = colors[1]
if X.shape[1] == 1:
from scipy.optimize import minimize_scalar
def fun(x, offset):
return (clf.decision_function([[x]])[0] - offset)**2
discr = minimize_scalar(fun, args=(0,), bounds=[0, 6]).x
dis_low = minimize_scalar(fun, args=(-0.5,), bounds=[0, 6]).x
dis_high = minimize_scalar(fun, args=(+0.5,), bounds=[0, 6]).x
df = pd.DataFrame([X[:, 0], y], index=['x', 'identity']).T
sns.swarmplot(
x='x', y='identity', data=df, ax=ax,
orient='h',
alpha=0.7,
)
ax.axvline(discr)
ax.axvline(dis_low, ls='--')
ax.axvline(dis_high, ls='--')
else:
ax.scatter(
X[:, 0], X[:, 1],
color=c,
zorder=10,
s=20)
x_min = X[:, 0].min()
x_max = X[:, 0].max()
y_min = X[:, 1].min()
y_max = X[:, 1].max()
XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])
# Put the result into a color plot
Z = Z.reshape(XX.shape)
ax.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired, alpha=0.05)
ax.contour(XX, YY, Z, colors=['k', 'k', 'k'],
linestyles=['--', '-', '--'], levels=[-.5, 0, .5])
return ax
# Script
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('tissues', nargs='+',
help='tissues to study')
parser.add_argument('--cell-types', nargs='+', required=True,
help='Limit to some cell types')
parser.add_argument('--subtissue', default=None,
help='Limit to a subtissue. To split by subtissue use "all"')
parser.add_argument('--save', action='store_true',
help='Store to file instead of showing')
parser.add_argument('--save-website', action='store_true',
help='Save result for the website JSON)')
parser.add_argument('--combination-logic', default='AND',
choices=['AND', 'OR'],
help='Combination logic between scattering and antibodies')
args = parser.parse_args()
# Get the list of commercially available antibodies
ab_comm = []
# Biolegend
ab_comm_table = parse_biolegend()
ab_unique = np.unique(ab_comm_table.dropna(subset=['GeneName'], axis=0)['GeneName'])
ab_comm.append(ab_unique)
# TODO: other vendors
if len(ab_comm):
ab_comm = np.unique(np.concatenate(ab_comm))
for tissue in args.tissues:
classifiers = []
for cell_type in args.cell_types:
print(cell_type)
clfs = {}
for clf_type in ('scattering', 'antibodies', 'combined'):
if args.subtissue is not None:
fn_glb = '../../data/classifiers/{:}_{:}_{:}_{:}'.format(
tissue.lower(),
args.subtissue.lower(),
cell_type.replace(' ', '_'),
clf_type,
)
else:
fn_glb = '../../data/classifiers/{:}_{:}_{:}'.format(
tissue.lower(),
cell_type.replace(' ', '_'),
clf_type,
)
fn_model = fn_glb+'.model.pickle'
fn_train = fn_glb+'.train.npz'
fn_meta = fn_glb+'.metadata.json'
fn_bundle = fn_glb+'.tar.gz'
with open(fn_meta, 'r') as f:
classifier = json.load(f)
clf = joblib.load(fn_model)
classifier['classifier'] = clf
train = np.load(fn_train)
classifier['X'] = train['X']
classifier['y'] = train['y']
classifier['cellnames'] = train['cellnames']
clfs[clf_type] = classifier
classifiers.append(clfs)
# Combine the classifiers
cells_common = np.intersect1d(
clfs['scattering']['cellnames'],
clfs['antibodies']['cellnames'],
)
Xs = pd.DataFrame(
data=clfs['scattering']['X'],
index=clfs['scattering']['cellnames'],
).loc[cells_common].values
ys = pd.Series(
data=clfs['scattering']['y'],
index=clfs['scattering']['cellnames'],
).loc[cells_common].values
clas = clfs['scattering']['classifier']
Xa = pd.DataFrame(
data=clfs['antibodies']['X'],
index=clfs['antibodies']['cellnames'],
).loc[cells_common].values
ya = pd.Series(
data=clfs['antibodies']['y'],
index=clfs['antibodies']['cellnames'],
).loc[cells_common].values
claa = clfs['antibodies']['classifier']
if (ys != ya).any():
raise ValueError('The true cell identity should be the same!')
cus = CombinedClassifier(
classifiers=[clas, claa],
logic=args.combination_logic)
from sklearn.externals import joblib
import json
for clfs in classifiers:
clf = clfs['combined']
fn = '{:}/university/postdoc/facsweb/app/facsweb/shared/static/data/merged_predictor_{:}_{:}.json'.format(
os.getenv('HOME'),
clf['tissue'],
clf['cell type'].replace(' ', '_'),
)
d = {}
d['tissue'] = clf['tissue']
d['cell type'] = clf['cell type']
d['data'] = {}
d['data']['scattering'] = clf['Xs'].tolist()
d['data']['antibodies'] = clf['Xa'].tolist()
d['data']['identity'] = clf['y'].tolist()
d['data']['cellnames'] = clf['cellnames'].tolist()
d['data']['scattering_axis_labels'] = [clf['xlabel'], clf['ylabel']]
d['data']['antibody_axis_labels'] = clf['genes']
d['data']['xlim_scattering'] = [0, clf['Xs'][:, 0].max() * 1.]
d['data']['ylim_scattering'] = [
np.floor(clf['Xs'][:, 1].min()),
np.ceil(clf['Xs'][:, 1].max()),
]
d['data']['xlim_antibodies'] = [
np.floor(clf['Xa'][:, 0].min()),
np.ceil(clf['Xa'][:, 0].max()),
]
d['data']['ylim_antibodies'] = [
np.floor(clf['Xa'][:, 1].min()),
np.ceil(clf['Xa'][:, 1].max()),
]
d['models'] = {
'combined': {},
'scattering': {},
'antibodies': {},
}
d['models']['combined']['precision'] = clf['precision']
d['models']['combined']['recall'] = clf['recall']
d['models']['combined']['logic'] = clf['combination_logic']
# Find roots of the classifiers
for clfname in ('scattering', 'antibodies'):
clfi = clfs[clfname]
xlim = [clf['X'+clfname[0]][:, 0].min(), clf['X'+clfname[0]][:, 0].max()]
ylim = [clf['X'+clfname[0]][:, 1].min(), clf['X'+clfname[0]][:, 1].max()]
xx = np.linspace(xlim[0], xlim[1], 500)
yy = np.linspace(ylim[0], ylim[1], 500)
xv, yv = np.meshgrid(xx, yy)
grid = np.vstack([xv.ravel(), yv.ravel()]).T
dec = clfi['classifier'].decision_function(grid)
roots = grid[np.abs(dec) < 0.02]
d['models'][clfname]['roots'] = roots.tolist()
roots_pos = grid[np.abs(dec - 0.25) < 0.02]
d['models'][clfname]['roots_pos'] = roots_pos.tolist()
roots_neg = grid[np.abs(dec + 0.25) < 0.02]
d['models'][clfname]['roots_neg'] = roots_neg.tolist()
d['models'][clfname]['precision'] = clfi['precision']
d['models'][clfname]['recall'] = clfi['recall']
with open(fn, 'w') as f:
json.dump(d, f)
print('Saved to file: {:}'.format(fn))
plt.ion()
plt.show()
| [
"fabio.zanini@fastmail.fm"
] | fabio.zanini@fastmail.fm |
3a8a6742d44c4a2169d12e211ea01c4f92cca229 | b805f0f0eed9c93ff564a719cb18de438a8572ee | /src/products/admin.py | f9f9c6aaa871a922d2ae3b74863216b0c40574a6 | [
"MIT"
] | permissive | pratikbarjatya/ecommerce | 1a3caae355d4dd06b4044fcce2d3fb48eae76034 | e18ba50c33c4b8f96c57785027f30b396104c47c | refs/heads/master | 2021-01-20T01:43:50.564162 | 2015-10-25T17:31:28 | 2015-10-25T17:31:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 616 | py | from django.contrib import admin
# Register your models here.
from .models import Product,Variation,ProductImage,Category,ProductFeatured
class ProductImageInline(admin.TabularInline):
model=ProductImage
extra=0
class VariationInline(admin.TabularInline):
model=Variation
extra=0
class ProductAdmin(admin.ModelAdmin):
list_display=['__str__','price']
inlines=[VariationInline,ProductImageInline,]
class Meta:
model=Product
admin.site.register(Product,ProductAdmin)
admin.site.register(Variation)
admin.site.register(ProductImage)
admin.site.register(Category)
admin.site.register(ProductFeatured) | [
"abhijit.bangera@hotmail.com"
] | abhijit.bangera@hotmail.com |
54b743456bd539834a2e5a137f35074640f31bd5 | 65a32b8a8a97c126843d2cfe79c43193ac2abc23 | /chapter3/list_split_exp.py | 97e41a94feed3cc9fc786adf2cf57f9a28e98b3b | [] | no_license | zhuyuedlut/advanced_programming | 9af2d6144e247168e492ddfb9af5d4a5667227c4 | a6e0456dd0b216b96829b5c3cef11df706525867 | refs/heads/master | 2023-03-19T09:21:31.234000 | 2020-10-09T13:09:38 | 2020-10-09T13:09:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | num_list = [1, 3, 5, 7, 9]
head, *rest = num_list
print(f'head is:{head}, rest list is:{rest}')
| [
"root@lyzdeMacBook.local"
] | root@lyzdeMacBook.local |
769e7d2055b1f12f280a29e7ebdd5b927ab628f7 | 99ed69aafb483b126f13fb8f0f5b31ad42e9829d | /pictures/urls.py | 19bb0699b4eb77e54dc18cb184126955f3e0af81 | [
"MIT"
] | permissive | Jackson-coder-arch/Instagram-pics | 3aba5da42a7cf8486651f91410364d1eafbcb722 | 9b4332d9f3144c4f655a0bf1313f0c4ef9481c4f | refs/heads/master | 2023-03-28T12:17:24.767827 | 2021-04-03T23:15:18 | 2021-04-03T23:15:18 | 351,892,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | from django.urls import path,re_path
from . import views
urlpatterns = [
path('',views.home, name = 'home'),
path('NewPost/',views.NewPost, name ='NewPost'),
path('profile/',views.profile,name ='profile')
] | [
"jacksonikonya@gmail.com"
] | jacksonikonya@gmail.com |
205155f6e5a48bf727a20d8fc592fec3365e0554 | 3be42b83a15d022f5863c96ec26e21bac0f7c27e | /spinoffs/oryx/oryx/experimental/__init__.py | 2364b4ee11a23f4fefa3f7fd65193a83401b1011 | [
"Apache-2.0"
] | permissive | ogrisel/probability | 846f5c13cddee5cf167b215e651b7479003f15d2 | 8f67456798615f9bf60ced2ce6db5d3dba3515fe | refs/heads/master | 2022-11-09T10:53:23.000918 | 2020-07-01T23:16:03 | 2020-07-01T23:17:25 | 276,580,359 | 2 | 1 | Apache-2.0 | 2020-07-02T07:37:58 | 2020-07-02T07:37:57 | null | UTF-8 | Python | false | false | 852 | py | # Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Lint as: python3
"""Module for experimental Oryx libraries."""
from oryx.experimental import mcmc
from oryx.experimental import nn
from oryx.experimental import optimizers
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
4fa176b40ebe223d94a0b8fd292d2a84b55ae1d8 | e3c8f786d09e311d6ea1cab50edde040bf1ea988 | /Incident-Response/Tools/grr/grr/core/grr_response_core/lib/parsers/cron_file_parser_test.py | f2c76e65b62e96e7ef20d6f26c6135a88e905aed | [
"Apache-2.0",
"MIT"
] | permissive | foss2cyber/Incident-Playbook | d1add8aec6e28a19e515754c6ce2e524d67f368e | a379a134c0c5af14df4ed2afa066c1626506b754 | refs/heads/main | 2023-06-07T09:16:27.876561 | 2021-07-07T03:48:54 | 2021-07-07T03:48:54 | 384,988,036 | 1 | 0 | MIT | 2021-07-11T15:45:31 | 2021-07-11T15:45:31 | null | UTF-8 | Python | false | false | 1,306 | py | #!/usr/bin/env python
"""Tests for grr.parsers.cron_file_parser."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from absl import app
from grr_response_core.lib.parsers import cron_file_parser
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr.test_lib import test_lib
class TestCronTabParsing(test_lib.GRRBaseTest):
"""Test parsing of cron files."""
def testCronTabParser(self):
"""Ensure we can extract jobs from a crontab file."""
parser = cron_file_parser.CronTabParser()
results = []
path = os.path.join(self.base_path, "parser_test", "crontab")
plist_file = open(path, "rb")
pathspec = rdf_paths.PathSpec.OS(path=path)
results.extend(list(parser.ParseFile(None, pathspec, plist_file)))
self.assertLen(results, 1)
for result in results:
self.assertEqual(result.jobs[0].minute, "1")
self.assertEqual(result.jobs[0].hour, "2")
self.assertEqual(result.jobs[0].dayofmonth, "3")
self.assertEqual(result.jobs[0].month, "4")
self.assertEqual(result.jobs[0].dayofweek, "5")
self.assertEqual(result.jobs[0].command, "/usr/bin/echo \"test\"")
def main(args):
test_lib.main(args)
if __name__ == "__main__":
app.run(main)
| [
"a.songer@protonmail.com"
] | a.songer@protonmail.com |
6eb47e6751955fabbe37ab1d26ad336f8bf3ec86 | 97def1949bca845f40a2fb99fe7496e698f51764 | /examples/doc/samples/comparisons/cutstock/cutstock_lpsolve.py | a0a1c6de6dfd1616b7b64c59d7e7a5bf8f49ec6f | [
"BSD-3-Clause"
] | permissive | flexciton/pyomo | e009e5d300d27d943408a1ee5e0e1770d772a7fe | 817bebc9c10f527263b2b8402fb1c038f1b37cf1 | refs/heads/master | 2023-03-03T08:56:22.922613 | 2022-01-18T15:22:57 | 2022-01-18T15:22:57 | 241,679,253 | 1 | 1 | NOASSERTION | 2022-04-11T16:48:48 | 2020-02-19T17:24:37 | Python | UTF-8 | Python | false | false | 2,072 | py | from lpsolve55 import *
from cutstock_util import*
# Reading in Data using the cutstock_util
cutcount = getCutCount()
patcount = getPatCount()
Cuts = getCuts()
Patterns = getPatterns()
PriceSheet = getPriceSheetData()
SheetsAvail = getSheetsAvail()
CutDemand = getCutDemand()
CutsInPattern = getCutsInPattern()
########################################
varcount = cutcount + patcount + 1 + 1
PatCountStart = 2
# Objective Coeff Array
ObjCoeff = range(varcount)
for i in range(varcount):
if i == 0:
ObjCoeff[i] = PriceSheet
else:
ObjCoeff[i] = 0
#Arrays for constraints
TotCostB = range(varcount)
for i in TotCostB:
TotCostB[i] = 0
TotCostB[0] = -PriceSheet
TotCostB[1] = 1
RawAvailB = range(varcount)
for i in RawAvailB:
RawAvailB[i] = 0
RawAvailB[0] = 1
SheetsB = range(varcount)
for i in SheetsB:
SheetsB[i] = 0
SheetsB[0] = 1
for i in range(patcount):
SheetsB[i+PatCountStart] = -1
CutReqB = [[0 for col in range(varcount)] for row in range(cutcount)]
for i in range(cutcount):
for j in range(patcount):
CutReqB[i][j+PatCountStart] = CutsInPattern[i][j]
CutReqB[i][patcount+PatCountStart+i] = -1
###################################################
lp = lpsolve('make_lp', 0, varcount)
ret = lpsolve('set_lp_name', lp, 'CutStock')
lpsolve('set_verbose', 'CutStock', IMPORTANT)
#Define Objective
ret = lpsolve('set_obj_fn', 'CutStock', ObjCoeff)
#Define Constraints
ret = lpsolve('add_constraint', 'CutStock', TotCostB, EQ, 0)
ret = lpsolve('add_constraint', 'CutStock', RawAvailB, LE, SheetsAvail)
ret = lpsolve('add_constraint', 'CutStock', SheetsB, EQ, 0)
for i in range(cutcount):
ret = lpsolve('add_constraint', 'CutStock', CutReqB[i], EQ, CutDemand[i])
lpsolve('solve', 'CutStock')
#ret = lpsolve('write_lp', 'CutStock', 'cutstock.lp')
lpsolve('solve', 'CutStock')
statuscode = lpsolve('get_status', 'CutStock')
print lpsolve('get_statustext', 'CutStock', statuscode)
print lpsolve('get_objective', 'CutStock')
print lpsolve('get_variables', 'CutStock')[0]
| [
"jsiirola@users.noreply.github.com"
] | jsiirola@users.noreply.github.com |
f4f8430fa2204442f6554319fdea560177296146 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Skimage_numpy/source/scipy/stats/_binned_statistic.py | d64c7bb3204ffb6013ae91c4e0a4c4e35a6a648f | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 25,272 | py | from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from scipy._lib.six import callable, xrange
from collections import namedtuple
__all__ = ['binned_statistic',
'binned_statistic_2d',
'binned_statistic_dd']
BinnedStatisticResult = namedtuple('BinnedStatisticResult',
('statistic', 'bin_edges', 'binnumber'))
def binned_statistic(x, values, statistic='mean',
bins=10, range=None):
"""
Compute a binned statistic for one or more sets of data.
This is a generalization of a histogram function. A histogram divides
the space into bins, and returns the count of the number of points in
each bin. This function allows the computation of the sum, mean, median,
or other statistic of the values (or set of values) within each bin.
Parameters
----------
x : (N,) array_like
A sequence of values to be binned.
values : (N,) array_like or list of (N,) array_like
The data on which the statistic will be computed. This must be
the same shape as `x`, or a set of sequences - each the same shape as
`x`. If `values` is a set of sequences, the statistic will be computed
on each independently.
statistic : string or callable, optional
The statistic to compute (default is 'mean').
The following statistics are available:
* 'mean' : compute the mean of values for points within each bin.
Empty bins will be represented by NaN.
* 'median' : compute the median of values for points within each
bin. Empty bins will be represented by NaN.
* 'count' : compute the count of points within each bin. This is
identical to an unweighted histogram. `values` array is not
referenced.
* 'sum' : compute the sum of values for points within each bin.
This is identical to a weighted histogram.
* function : a user-defined function which takes a 1D array of
values, and outputs a single numerical statistic. This function
will be called on the values in each bin. Empty bins will be
represented by function([]), or NaN if this returns an error.
bins : int or sequence of scalars, optional
If `bins` is an int, it defines the number of equal-width bins in the
given range (10 by default). If `bins` is a sequence, it defines the
bin edges, including the rightmost edge, allowing for non-uniform bin
widths. Values in `x` that are smaller than lowest bin edge are
assigned to bin number 0, values beyond the highest bin are assigned to
``bins[-1]``. If the bin edges are specified, the number of bins will
be, (nx = len(bins)-1).
range : (float, float) or [(float, float)], optional
The lower and upper range of the bins. If not provided, range
is simply ``(x.min(), x.max())``. Values outside the range are
ignored.
Returns
-------
statistic : array
The values of the selected statistic in each bin.
bin_edges : array of dtype float
Return the bin edges ``(length(statistic)+1)``.
binnumber: 1-D ndarray of ints
Indices of the bins (corresponding to `bin_edges`) in which each value
of `x` belongs. Same length as `values`. A binnumber of `i` means the
corresponding value is between (bin_edges[i-1], bin_edges[i]).
See Also
--------
numpy.digitize, numpy.histogram, binned_statistic_2d, binned_statistic_dd
Notes
-----
All but the last (righthand-most) bin is half-open. In other words, if
`bins` is ``[1, 2, 3, 4]``, then the first bin is ``[1, 2)`` (including 1,
but excluding 2) and the second ``[2, 3)``. The last bin, however, is
``[3, 4]``, which *includes* 4.
.. versionadded:: 0.11.0
Examples
--------
>>> from scipy import stats
>>> import matplotlib.pyplot as plt
First some basic examples:
Create two evenly spaced bins in the range of the given sample, and sum the
corresponding values in each of those bins:
>>> values = [1.0, 1.0, 2.0, 1.5, 3.0]
>>> stats.binned_statistic([1, 1, 2, 5, 7], values, 'sum', bins=2)
(array([ 4. , 4.5]), array([ 1., 4., 7.]), array([1, 1, 1, 2, 2]))
Multiple arrays of values can also be passed. The statistic is calculated
on each set independently:
>>> values = [[1.0, 1.0, 2.0, 1.5, 3.0], [2.0, 2.0, 4.0, 3.0, 6.0]]
>>> stats.binned_statistic([1, 1, 2, 5, 7], values, 'sum', bins=2)
(array([[ 4. , 4.5], [ 8. , 9. ]]), array([ 1., 4., 7.]),
array([1, 1, 1, 2, 2]))
>>> stats.binned_statistic([1, 2, 1, 2, 4], np.arange(5), statistic='mean',
... bins=3)
(array([ 1., 2., 4.]), array([ 1., 2., 3., 4.]),
array([1, 2, 1, 2, 3]))
As a second example, we now generate some random data of sailing boat speed
as a function of wind speed, and then determine how fast our boat is for
certain wind speeds:
>>> windspeed = 8 * np.random.rand(500)
>>> boatspeed = .3 * windspeed**.5 + .2 * np.random.rand(500)
>>> bin_means, bin_edges, binnumber = stats.binned_statistic(windspeed,
... boatspeed, statistic='median', bins=[1,2,3,4,5,6,7])
>>> plt.figure()
>>> plt.plot(windspeed, boatspeed, 'b.', label='raw data')
>>> plt.hlines(bin_means, bin_edges[:-1], bin_edges[1:], colors='g', lw=5,
... label='binned statistic of data')
>>> plt.legend()
Now we can use ``binnumber`` to select all datapoints with a windspeed
below 1:
>>> low_boatspeed = boatspeed[binnumber == 0]
As a final example, we will use ``bin_edges`` and ``binnumber`` to make a
plot of a distribution that shows the mean and distribution around that
mean per bin, on top of a regular histogram and the probability
distribution function:
>>> x = np.linspace(0, 5, num=500)
>>> x_pdf = stats.maxwell.pdf(x)
>>> samples = stats.maxwell.rvs(size=10000)
>>> bin_means, bin_edges, binnumber = stats.binned_statistic(x, x_pdf,
... statistic='mean', bins=25)
>>> bin_width = (bin_edges[1] - bin_edges[0])
>>> bin_centers = bin_edges[1:] - bin_width/2
>>> plt.figure()
>>> plt.hist(samples, bins=50, normed=True, histtype='stepfilled',
... alpha=0.2, label='histogram of data')
>>> plt.plot(x, x_pdf, 'r-', label='analytical pdf')
>>> plt.hlines(bin_means, bin_edges[:-1], bin_edges[1:], colors='g', lw=2,
... label='binned statistic of data')
>>> plt.plot((binnumber - 0.5) * bin_width, x_pdf, 'g.', alpha=0.5)
>>> plt.legend(fontsize=10)
>>> plt.show()
"""
try:
N = len(bins)
except TypeError:
N = 1
if N != 1:
bins = [np.asarray(bins, float)]
if range is not None:
if len(range) == 2:
range = [range]
medians, edges, binnumbers = binned_statistic_dd(
[x], values, statistic, bins, range)
return BinnedStatisticResult(medians, edges[0], binnumbers)
BinnedStatistic2dResult = namedtuple('BinnedStatistic2dResult',
('statistic', 'x_edge', 'y_edge',
'binnumber'))
def binned_statistic_2d(x, y, values, statistic='mean',
                        bins=10, range=None, expand_binnumbers=False):
    """
    Compute a bidimensional binned statistic for one or more sets of data.

    This generalizes a histogram2d function.  A histogram divides the plane
    into bins and counts the points falling into each one; this function can
    instead compute the sum, mean, median, or another statistic of the
    `values` associated with the points in each bin.

    Parameters
    ----------
    x : (N,) array_like
        A sequence of values to be binned along the first dimension.
    y : (N,) array_like
        A sequence of values to be binned along the second dimension.
    values : (N,) array_like or list of (N,) array_like
        The data on which the statistic will be computed.  This must have the
        same shape as `x`, or be a list of sequences - each with the same
        shape as `x`.  If it is such a list, the statistic is computed on
        each sequence independently.
    statistic : string or callable, optional
        The statistic to compute (default is 'mean').
        The following statistics are available:
          * 'mean' : the mean of values for points within each bin.
            Empty bins are represented by NaN.
          * 'median' : the median of values for points within each bin.
            Empty bins are represented by NaN.
          * 'count' : the number of points within each bin.  This is
            identical to an unweighted histogram; `values` is not referenced.
          * 'sum' : the sum of values for points within each bin.  This is
            identical to a weighted histogram.
          * function : a user-defined callable taking a 1D array of values
            and returning a single numerical statistic.  It is called on the
            values in each bin; empty bins are represented by
            ``function([])``, or NaN if that call raises.
    bins : int or [int, int] or array_like or [array, array], optional
        The bin specification:
          * the number of bins for both dimensions (nx = ny = bins),
          * the number of bins per dimension (nx, ny = bins),
          * the bin edges shared by both dimensions (x_edge = y_edge = bins),
          * the bin edges per dimension (x_edge, y_edge = bins).
        With explicit edges, the bin counts are
        (nx = len(x_edge)-1, ny = len(y_edge)-1).
    range : (2,2) array_like, optional
        The leftmost and rightmost edges of the bins along each dimension,
        used when `bins` does not give explicit edges:
        [[xmin, xmax], [ymin, ymax]].  Values outside this range are treated
        as outliers and are not tallied.
    expand_binnumbers : bool, optional
        'False' (default): the returned `binnumber` is a shape (N,) array of
        linearized bin indices.
        'True': the returned `binnumber` is 'unraveled' into a shape (2,N)
        ndarray whose rows give the bin numbers along each dimension.
        See the `binnumber` returned value, and the `Examples` section.
        .. versionadded:: 0.17.0

    Returns
    -------
    statistic : (nx, ny) ndarray
        The values of the selected statistic in each two-dimensional bin.
    x_edge : (nx + 1) ndarray
        The bin edges along the first dimension.
    y_edge : (ny + 1) ndarray
        The bin edges along the second dimension.
    binnumber : (N,) array of ints or (2,N) ndarray of ints
        For each element of the sample, the bin in which it falls.  The
        representation depends on `expand_binnumbers`; see `Notes`.

    See Also
    --------
    numpy.digitize, numpy.histogram2d, binned_statistic, binned_statistic_dd

    Notes
    -----
    Binedges:
    All but the last (righthand-most) bin is half-open.  With `bins` equal
    to ``[1, 2, 3, 4]``, the first bin is ``[1, 2)`` and the second is
    ``[2, 3)``; the last bin is ``[3, 4]``, which *includes* 4.

    `binnumber`:
    If `expand_binnumbers` is 'False' (default), `binnumber` is a shape (N,)
    array of linearized indices mapping each sample to its bin (row-major
    ordering).  If 'True', it is a shape (2,N) ndarray whose rows give the
    per-dimension bin placement: a bin number `i` means the value lies
    between (D_edge[i-1], D_edge[i]), where 'D' is either 'x' or 'y'.
    .. versionadded:: 0.11.0

    Examples
    --------
    >>> from scipy import stats

    Calculate the counts with explicit bin-edges:

    >>> x = [0.1, 0.1, 0.1, 0.6]
    >>> y = [2.1, 2.6, 2.1, 2.1]
    >>> binx = [0.0, 0.5, 1.0]
    >>> biny = [2.0, 2.5, 3.0]
    >>> ret = stats.binned_statistic_2d(x, y, None, 'count', bins=[binx,biny])
    >>> ret.statistic
    array([[ 2.,  1.],
           [ 1.,  0.]])

    The per-dimension bin indices are available via `expand_binnumbers`:

    >>> ret = stats.binned_statistic_2d(x, y, None, 'count', bins=[binx,biny],
    ...                                 expand_binnumbers=True)
    >>> ret.binnumber
    array([[1, 1, 1, 2],
           [1, 2, 1, 1]])
    """
    # Normalize the bin specification the way np.histogram2d does: a spec
    # that is neither a scalar nor a two-element sequence is a single array
    # of edges shared by both dimensions (same object for x and y).
    try:
        n_spec = len(bins)
    except TypeError:
        n_spec = 1
    if n_spec not in (1, 2):
        shared_edges = np.asarray(bins, float)
        bins = [shared_edges, shared_edges]

    # All of the real work is delegated to the N-dimensional implementation.
    stat, edge_list, binnumbers = binned_statistic_dd(
        [x, y], values, statistic, bins, range,
        expand_binnumbers=expand_binnumbers)

    return BinnedStatistic2dResult(stat, edge_list[0], edge_list[1], binnumbers)
# Result container returned by `binned_statistic_dd`.
BinnedStatisticddResult = namedtuple(
    'BinnedStatisticddResult',
    ['statistic', 'bin_edges', 'binnumber'])
def binned_statistic_dd(sample, values, statistic='mean',
                        bins=10, range=None, expand_binnumbers=False):
    """
    Compute a multidimensional binned statistic for a set of data.
    This is a generalization of a histogramdd function. A histogram divides
    the space into bins, and returns the count of the number of points in
    each bin. This function allows the computation of the sum, mean, median,
    or other statistic of the values within each bin.
    Parameters
    ----------
    sample : array_like
        Data to histogram passed as a sequence of D arrays of length N, or
        as an (N,D) array.
    values : (N,) array_like or list of (N,) array_like
        The data on which the statistic will be computed. This must be
        the same shape as `x`, or a list of sequences - each with the same
        shape as `x`. If `values` is such a list, the statistic will be
        computed on each independently.
    statistic : string or callable, optional
        The statistic to compute (default is 'mean').
        The following statistics are available:
        * 'mean' : compute the mean of values for points within each bin.
          Empty bins will be represented by NaN.
        * 'median' : compute the median of values for points within each
          bin. Empty bins will be represented by NaN.
        * 'count' : compute the count of points within each bin. This is
          identical to an unweighted histogram. `values` array is not
          referenced.
        * 'sum' : compute the sum of values for points within each bin.
          This is identical to a weighted histogram.
        * function : a user-defined function which takes a 1D array of
          values, and outputs a single numerical statistic. This function
          will be called on the values in each bin. Empty bins will be
          represented by function([]), or NaN if this returns an error.
    bins : sequence or int, optional
        The bin specification must be in one of the following forms:
        * A sequence of arrays describing the bin edges along each dimension.
        * The number of bins for each dimension (nx, ny, ... = bins).
        * The number of bins for all dimensions (nx = ny = ... = bins).
    range : sequence, optional
        A sequence of lower and upper bin edges to be used if the edges are
        not given explicitly in `bins`. Defaults to the minimum and maximum
        values along each dimension.
    expand_binnumbers : bool, optional
        'False' (default): the returned `binnumber` is a shape (N,) array of
        linearized bin indices.
        'True': the returned `binnumber` is 'unraveled' into a shape (D,N)
        ndarray, where each row gives the bin numbers in the corresponding
        dimension.
        See the `binnumber` returned value, and the `Examples` section of
        `binned_statistic_2d`.
        .. versionadded:: 0.17.0
    Returns
    -------
    statistic : ndarray, shape(nx1, nx2, nx3,...)
        The values of the selected statistic in each two-dimensional bin.
    bin_edges : list of ndarrays
        A list of D arrays describing the (nxi + 1) bin edges for each
        dimension.
    binnumber : (N,) array of ints or (D,N) ndarray of ints
        This assigns to each element of `sample` an integer that represents the
        bin in which this observation falls. The representation depends on the
        `expand_binnumbers` argument. See `Notes` for details.
    See Also
    --------
    numpy.digitize, numpy.histogramdd, binned_statistic, binned_statistic_2d
    Notes
    -----
    Binedges:
    All but the last (righthand-most) bin is half-open in each dimension. In
    other words, if `bins` is ``[1, 2, 3, 4]``, then the first bin is
    ``[1, 2)`` (including 1, but excluding 2) and the second ``[2, 3)``. The
    last bin, however, is ``[3, 4]``, which *includes* 4.
    `binnumber`:
    This returned argument assigns to each element of `sample` an integer that
    represents the bin in which it belongs. The representation depends on the
    `expand_binnumbers` argument. If 'False' (default): The returned
    `binnumber` is a shape (N,) array of linearized indices mapping each
    element of `sample` to its corresponding bin (using row-major ordering).
    If 'True': The returned `binnumber` is a shape (D,N) ndarray where
    each row indicates bin placements for each dimension respectively. In each
    dimension, a binnumber of `i` means the corresponding value is between
    (bin_edges[D][i-1], bin_edges[D][i]), for each dimension 'D'.
    .. versionadded:: 0.11.0
    """
    known_stats = ['mean', 'median', 'count', 'sum', 'std']
    if not callable(statistic) and statistic not in known_stats:
        raise ValueError('invalid statistic %r' % (statistic,))
    # `Ndim` is the number of dimensions (e.g. `2` for `binned_statistic_2d`)
    # `Dlen` is the length of elements along each dimension.
    # This code is based on np.histogramdd
    try:
        # `sample` is an ND-array.
        Dlen, Ndim = sample.shape
    except (AttributeError, ValueError):
        # `sample` is a sequence of 1D arrays.
        sample = np.atleast_2d(sample).T
        Dlen, Ndim = sample.shape
    # Store initial shape of `values` to preserve it in the output
    values = np.asarray(values)
    input_shape = list(values.shape)
    # Make sure that `values` is 2D to iterate over rows
    values = np.atleast_2d(values)
    Vdim, Vlen = values.shape
    # Make sure `values` match `sample` ('count' never reads `values`, so it
    # is exempt from the length check).
    if(statistic != 'count' and Vlen != Dlen):
        raise AttributeError('The number of `values` elements must match the '
                             'length of each `sample` dimension.')
    nbin = np.empty(Ndim, int)  # Number of bins in each dimension
    edges = Ndim * [None]  # Bin edges for each dim (will be 2D array)
    dedges = Ndim * [None]  # Spacing between edges (will be 2D array)
    try:
        M = len(bins)
        if M != Ndim:
            raise AttributeError('The dimension of bins must be equal '
                                 'to the dimension of the sample x.')
    except TypeError:
        # A scalar bin spec is replicated across every dimension.
        bins = Ndim * [bins]
    # Select range for each dimension
    # Used only if number of bins is given.
    if range is None:
        smin = np.atleast_1d(np.array(sample.min(axis=0), float))
        smax = np.atleast_1d(np.array(sample.max(axis=0), float))
    else:
        smin = np.zeros(Ndim)
        smax = np.zeros(Ndim)
        for i in xrange(Ndim):
            smin[i], smax[i] = range[i]
    # Make sure the bins have a finite width (a degenerate dimension gets a
    # unit-wide bin centered on its single value).
    for i in xrange(len(smin)):
        if smin[i] == smax[i]:
            smin[i] = smin[i] - .5
            smax[i] = smax[i] + .5
    # Create edge arrays
    for i in xrange(Ndim):
        if np.isscalar(bins[i]):
            nbin[i] = bins[i] + 2  # +2 for outlier bins
            edges[i] = np.linspace(smin[i], smax[i], nbin[i] - 1)
        else:
            edges[i] = np.asarray(bins[i], float)
            nbin[i] = len(edges[i]) + 1  # +1 for outlier bins
        dedges[i] = np.diff(edges[i])
    nbin = np.asarray(nbin)
    # Compute the bin number each sample falls into, in each dimension
    # (dict keyed by dimension index).
    sampBin = {}
    for i in xrange(Ndim):
        sampBin[i] = np.digitize(sample[:, i], edges[i])
    # Using `digitize`, values that fall on an edge are put in the right bin.
    # For the rightmost bin, we want values equal to the right
    # edge to be counted in the last bin, and not as an outlier.
    for i in xrange(Ndim):
        # Find the rounding precision (~6 significant digits relative to the
        # narrowest bin of this dimension).
        decimal = int(-np.log10(dedges[i].min())) + 6
        # Find which points are on the rightmost edge.
        on_edge = np.where(np.around(sample[:, i], decimal) ==
                           np.around(edges[i][-1], decimal))[0]
        # Shift these points one bin to the left.
        sampBin[i][on_edge] -= 1
    # Compute the sample indices in the flattened statistic matrix.
    # `ni` orders the dimensions by increasing bin count; the same order is
    # undone by the swapaxes loop after the statistics are computed.
    ni = nbin.argsort()
    # `binnumbers` is which bin (in linearized `Ndim` space) each sample goes
    binnumbers = np.zeros(Dlen, int)
    for i in xrange(0, Ndim - 1):
        binnumbers += sampBin[ni[i]] * nbin[ni[i + 1:]].prod()
    binnumbers += sampBin[ni[-1]]
    result = np.empty([Vdim, nbin.prod()], float)
    if statistic == 'mean':
        result.fill(np.nan)
        flatcount = np.bincount(binnumbers, None)
        a = flatcount.nonzero()
        for vv in xrange(Vdim):
            flatsum = np.bincount(binnumbers, values[vv])
            result[vv, a] = flatsum[a] / flatcount[a]
    elif statistic == 'std':
        result.fill(0)
        flatcount = np.bincount(binnumbers, None)
        a = flatcount.nonzero()
        for vv in xrange(Vdim):
            flatsum = np.bincount(binnumbers, values[vv])
            flatsum2 = np.bincount(binnumbers, values[vv] ** 2)
            # std via E[x^2] - E[x]^2 on the per-bin sums.
            result[vv, a] = np.sqrt(flatsum2[a] / flatcount[a] -
                                    (flatsum[a] / flatcount[a]) ** 2)
    elif statistic == 'count':
        result.fill(0)
        flatcount = np.bincount(binnumbers, None)
        a = np.arange(len(flatcount))
        result[:, a] = flatcount[np.newaxis, :]
    elif statistic == 'sum':
        result.fill(0)
        for vv in xrange(Vdim):
            flatsum = np.bincount(binnumbers, values[vv])
            a = np.arange(len(flatsum))
            result[vv, a] = flatsum
    elif statistic == 'median':
        result.fill(np.nan)
        for i in np.unique(binnumbers):
            for vv in xrange(Vdim):
                result[vv, i] = np.median(values[vv, binnumbers == i])
    elif callable(statistic):
        with warnings.catch_warnings():
            # Numpy generates warnings for mean/std/... with an empty list
            warnings.filterwarnings('ignore', category=RuntimeWarning)
            old = np.seterr(invalid='ignore')
            try:
                null = statistic([])
            # NOTE(review): bare except — any failure of statistic([]) is
            # deliberately treated as "fill empty bins with NaN".
            except:
                null = np.nan
            np.seterr(**old)
        result.fill(null)
        for i in np.unique(binnumbers):
            for vv in xrange(Vdim):
                result[vv, i] = statistic(values[vv, binnumbers == i])
    # Shape into a proper matrix
    result = result.reshape(np.append(Vdim, np.sort(nbin)))
    # Selection-sort the axes back into the caller's dimension order,
    # undoing the np.sort(nbin)/`ni` ordering used above.
    for i in xrange(nbin.size):
        j = ni.argsort()[i]
        # Accommodate the extra `Vdim` dimension-zero with `+1`
        result = result.swapaxes(i+1, j+1)
        ni[i], ni[j] = ni[j], ni[i]
    # Remove outliers (indices 0 and -1 for each bin-dimension).
    core = [slice(None)] + Ndim * [slice(1, -1)]
    result = result[core]
    # Unravel binnumbers into an ndarray, each row the bins for each dimension
    if(expand_binnumbers and Ndim > 1):
        binnumbers = np.asarray(np.unravel_index(binnumbers, nbin))
    if np.any(result.shape[1:] != nbin - 2):
        raise RuntimeError('Internal Shape Error')
    # Reshape to have output (`result`) match input (`values`) shape
    result = result.reshape(input_shape[:-1] + list(nbin-2))
    return BinnedStatisticddResult(result, edges, binnumbers)
| [
"ryfeus@gmail.com"
] | ryfeus@gmail.com |
0d3c21ced8d3db5eb9c906ef396294c167eaaf60 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/iothub/azure-mgmt-iothub/azure/mgmt/iothub/v2022_04_30_preview/models/_models_py3.py | 00a1e91a26c71ae9cd2636d7d9e0f6bdfffab148 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 185,659 | py | # coding=utf-8
# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
import sys
from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union
from ... import _serialization
if sys.version_info >= (3, 9):
from collections.abc import MutableMapping
else:
from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object
class ArmIdentity(_serialization.Model):
    """Managed-identity settings for an ARM resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar principal_id: Principal Id.
    :vartype principal_id: str
    :ivar tenant_id: Tenant Id.
    :vartype tenant_id: str
    :ivar type: The type of identity used for the resource. The type 'SystemAssigned,UserAssigned'
     combines an implicitly created identity with a set of user assigned identities, while 'None'
     removes any identities from the service. Known values are: "SystemAssigned",
     "UserAssigned", "SystemAssigned, UserAssigned", and "None".
    :vartype type: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.ResourceIdentityType
    :ivar user_assigned_identities: Dictionary of :code:`<ArmUserIdentity>`.
    :vartype user_assigned_identities: dict[str,
     ~azure.mgmt.iothub.v2022_04_30_preview.models.ArmUserIdentity]
    """

    # Fields the service populates; they are stripped from outgoing payloads.
    _validation = {
        'principal_id': {'readonly': True},
        'tenant_id': {'readonly': True},
    }

    # Python attribute -> wire-format key/type mapping used by the serializer.
    _attribute_map = {
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{ArmUserIdentity}'},
    }

    def __init__(
        self,
        *,
        type: Optional[Union[str, '_models.ResourceIdentityType']] = None,
        user_assigned_identities: Optional[Dict[str, '_models.ArmUserIdentity']] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword type: The type of identity used for the resource. The type
         'SystemAssigned,UserAssigned' combines an implicitly created identity with a set of user
         assigned identities, while 'None' removes any identities from the service. Known values
         are: "SystemAssigned", "UserAssigned", "SystemAssigned, UserAssigned", and "None".
        :paramtype type: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.ResourceIdentityType
        :keyword user_assigned_identities: Dictionary of :code:`<ArmUserIdentity>`.
        :paramtype user_assigned_identities: dict[str,
         ~azure.mgmt.iothub.v2022_04_30_preview.models.ArmUserIdentity]
        """
        super().__init__(**kwargs)
        # Read-only fields start unset; the service fills them in on responses.
        self.principal_id = None
        self.tenant_id = None
        self.type = type
        self.user_assigned_identities = user_assigned_identities
class ArmUserIdentity(_serialization.Model):
    """A single user-assigned identity entry.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar principal_id:
    :vartype principal_id: str
    :ivar client_id:
    :vartype client_id: str
    """

    # Both fields are service-assigned and never serialized in requests.
    _validation = {
        'principal_id': {'readonly': True},
        'client_id': {'readonly': True},
    }

    _attribute_map = {
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # Populated from service responses only.
        self.principal_id = None
        self.client_id = None
class CertificateBodyDescription(_serialization.Model):
    """The JSON-serialized X509 Certificate.

    :ivar certificate: base-64 representation of the X509 leaf certificate .cer file or just .pem
     file content.
    :vartype certificate: str
    :ivar is_verified: True indicates that the certificate will be created in verified state and
     proof of possession will not be required.
    :vartype is_verified: bool
    """

    _attribute_map = {
        'certificate': {'key': 'certificate', 'type': 'str'},
        'is_verified': {'key': 'isVerified', 'type': 'bool'},
    }

    def __init__(
        self,
        *,
        certificate: Optional[str] = None,
        is_verified: Optional[bool] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword certificate: base-64 representation of the X509 leaf certificate .cer file or just
         .pem file content.
        :paramtype certificate: str
        :keyword is_verified: True indicates that the certificate will be created in verified state
         and proof of possession will not be required.
        :paramtype is_verified: bool
        """
        super().__init__(**kwargs)
        self.certificate = certificate
        self.is_verified = is_verified
class CertificateDescription(_serialization.Model):
    """The X509 Certificate.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar properties: The description of an X509 CA Certificate.
    :vartype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.CertificateProperties
    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The name of the certificate.
    :vartype name: str
    :ivar etag: The entity tag.
    :vartype etag: str
    :ivar type: The resource type.
    :vartype type: str
    """

    # Everything except `properties` is assigned by the service.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'etag': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'properties': {'key': 'properties', 'type': 'CertificateProperties'},
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, properties: Optional['_models.CertificateProperties'] = None, **kwargs: Any) -> None:
        """
        :keyword properties: The description of an X509 CA Certificate.
        :paramtype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.CertificateProperties
        """
        super().__init__(**kwargs)
        self.properties = properties
        # Service-assigned resource metadata.
        self.id = None
        self.name = None
        self.etag = None
        self.type = None
class CertificateListDescription(_serialization.Model):
    """The JSON-serialized array of Certificate objects.

    :ivar value: The array of Certificate objects.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.CertificateDescription]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[CertificateDescription]'},
    }

    def __init__(self, *, value: Optional[List['_models.CertificateDescription']] = None, **kwargs: Any) -> None:
        """
        :keyword value: The array of Certificate objects.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.CertificateDescription]
        """
        super().__init__(**kwargs)
        self.value = value
class CertificateProperties(_serialization.Model):
    """The description of an X509 CA Certificate.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar subject: The certificate's subject name.
    :vartype subject: str
    :ivar expiry: The certificate's expiration date and time.
    :vartype expiry: ~datetime.datetime
    :ivar thumbprint: The certificate's thumbprint.
    :vartype thumbprint: str
    :ivar is_verified: Determines whether certificate has been verified.
    :vartype is_verified: bool
    :ivar created: The certificate's create date and time.
    :vartype created: ~datetime.datetime
    :ivar updated: The certificate's last update date and time.
    :vartype updated: ~datetime.datetime
    :ivar certificate: The certificate content.
    :vartype certificate: str
    """

    # Only `is_verified` and `certificate` may be supplied by the caller.
    _validation = {
        'subject': {'readonly': True},
        'expiry': {'readonly': True},
        'thumbprint': {'readonly': True},
        'created': {'readonly': True},
        'updated': {'readonly': True},
    }

    # Timestamps travel on the wire in RFC 1123 format.
    _attribute_map = {
        'subject': {'key': 'subject', 'type': 'str'},
        'expiry': {'key': 'expiry', 'type': 'rfc-1123'},
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
        'is_verified': {'key': 'isVerified', 'type': 'bool'},
        'created': {'key': 'created', 'type': 'rfc-1123'},
        'updated': {'key': 'updated', 'type': 'rfc-1123'},
        'certificate': {'key': 'certificate', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        is_verified: Optional[bool] = None,
        certificate: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword is_verified: Determines whether certificate has been verified.
        :paramtype is_verified: bool
        :keyword certificate: The certificate content.
        :paramtype certificate: str
        """
        super().__init__(**kwargs)
        # Read-only fields are filled in from service responses.
        self.subject = None
        self.expiry = None
        self.thumbprint = None
        self.is_verified = is_verified
        self.created = None
        self.updated = None
        self.certificate = certificate
class CertificatePropertiesWithNonce(_serialization.Model):
    """The description of an X509 CA Certificate including the challenge nonce issued for the
    Proof-Of-Possession flow.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar subject: The certificate's subject name.
    :vartype subject: str
    :ivar expiry: The certificate's expiration date and time.
    :vartype expiry: ~datetime.datetime
    :ivar thumbprint: The certificate's thumbprint.
    :vartype thumbprint: str
    :ivar is_verified: Determines whether certificate has been verified.
    :vartype is_verified: bool
    :ivar created: The certificate's create date and time.
    :vartype created: ~datetime.datetime
    :ivar updated: The certificate's last update date and time.
    :vartype updated: ~datetime.datetime
    :ivar verification_code: The certificate's verification code that will be used for proof of
     possession.
    :vartype verification_code: str
    :ivar certificate: The certificate content.
    :vartype certificate: str
    """

    # Every field is service-assigned; nothing here is sent in a request.
    _validation = {
        'subject': {'readonly': True},
        'expiry': {'readonly': True},
        'thumbprint': {'readonly': True},
        'is_verified': {'readonly': True},
        'created': {'readonly': True},
        'updated': {'readonly': True},
        'verification_code': {'readonly': True},
        'certificate': {'readonly': True},
    }

    # Timestamps travel on the wire in RFC 1123 format.
    _attribute_map = {
        'subject': {'key': 'subject', 'type': 'str'},
        'expiry': {'key': 'expiry', 'type': 'rfc-1123'},
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
        'is_verified': {'key': 'isVerified', 'type': 'bool'},
        'created': {'key': 'created', 'type': 'rfc-1123'},
        'updated': {'key': 'updated', 'type': 'rfc-1123'},
        'verification_code': {'key': 'verificationCode', 'type': 'str'},
        'certificate': {'key': 'certificate', 'type': 'str'},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # All attributes are populated from service responses only.
        self.subject = None
        self.expiry = None
        self.thumbprint = None
        self.is_verified = None
        self.created = None
        self.updated = None
        self.verification_code = None
        self.certificate = None
class CertificateVerificationDescription(_serialization.Model):
    """The JSON-serialized leaf certificate.

    :ivar certificate: base-64 representation of X509 certificate .cer file or just .pem file
     content.
    :vartype certificate: str
    """

    _attribute_map = {
        'certificate': {'key': 'certificate', 'type': 'str'},
    }

    def __init__(self, *, certificate: Optional[str] = None, **kwargs: Any) -> None:
        """
        :keyword certificate: base-64 representation of X509 certificate .cer file or just .pem
         file content.
        :paramtype certificate: str
        """
        super().__init__(**kwargs)
        self.certificate = certificate
class CertificateWithNonceDescription(_serialization.Model):
    """The X509 Certificate.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar properties: The description of an X509 CA Certificate including the challenge nonce
     issued for the Proof-Of-Possession flow.
    :vartype properties:
     ~azure.mgmt.iothub.v2022_04_30_preview.models.CertificatePropertiesWithNonce
    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The name of the certificate.
    :vartype name: str
    :ivar etag: The entity tag.
    :vartype etag: str
    :ivar type: The resource type.
    :vartype type: str
    """

    # Everything except `properties` is assigned by the service.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'etag': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'properties': {'key': 'properties', 'type': 'CertificatePropertiesWithNonce'},
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, *, properties: Optional['_models.CertificatePropertiesWithNonce'] = None, **kwargs: Any) -> None:
        """
        :keyword properties: The description of an X509 CA Certificate including the challenge
         nonce issued for the Proof-Of-Possession flow.
        :paramtype properties:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.CertificatePropertiesWithNonce
        """
        super().__init__(**kwargs)
        self.properties = properties
        # Service-assigned resource metadata.
        self.id = None
        self.name = None
        self.etag = None
        self.type = None
class CloudToDeviceProperties(_serialization.Model):
    """The IoT hub cloud-to-device messaging properties.

    :ivar max_delivery_count: The max delivery count for cloud-to-device messages in the device
     queue. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
    :vartype max_delivery_count: int
    :ivar default_ttl_as_iso8601: The default time to live for cloud-to-device messages in the
     device queue. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
    :vartype default_ttl_as_iso8601: ~datetime.timedelta
    :ivar feedback: The properties of the feedback queue for cloud-to-device messages.
    :vartype feedback: ~azure.mgmt.iothub.v2022_04_30_preview.models.FeedbackProperties
    """

    # Delivery count is constrained to [1, 100] by the service contract.
    _validation = {
        'max_delivery_count': {'maximum': 100, 'minimum': 1},
    }

    # The TTL is serialized as an ISO 8601 duration.
    _attribute_map = {
        'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'},
        'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type': 'duration'},
        'feedback': {'key': 'feedback', 'type': 'FeedbackProperties'},
    }

    def __init__(
        self,
        *,
        max_delivery_count: Optional[int] = None,
        default_ttl_as_iso8601: Optional[datetime.timedelta] = None,
        feedback: Optional['_models.FeedbackProperties'] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword max_delivery_count: The max delivery count for cloud-to-device messages in the
         device queue. See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
        :paramtype max_delivery_count: int
        :keyword default_ttl_as_iso8601: The default time to live for cloud-to-device messages in
         the device queue. See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
        :paramtype default_ttl_as_iso8601: ~datetime.timedelta
        :keyword feedback: The properties of the feedback queue for cloud-to-device messages.
        :paramtype feedback: ~azure.mgmt.iothub.v2022_04_30_preview.models.FeedbackProperties
        """
        super().__init__(**kwargs)
        self.max_delivery_count = max_delivery_count
        self.default_ttl_as_iso8601 = default_ttl_as_iso8601
        self.feedback = feedback
class EncryptionPropertiesDescription(_serialization.Model):
    """The encryption properties for the IoT hub.

    :ivar key_source: The source of the key.
    :vartype key_source: str
    :ivar key_vault_properties: The properties of the KeyVault key.
    :vartype key_vault_properties:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.KeyVaultKeyProperties]
    """

    _attribute_map = {
        'key_source': {'key': 'keySource', 'type': 'str'},
        'key_vault_properties': {'key': 'keyVaultProperties', 'type': '[KeyVaultKeyProperties]'},
    }

    def __init__(
        self,
        *,
        key_source: Optional[str] = None,
        key_vault_properties: Optional[List['_models.KeyVaultKeyProperties']] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword key_source: The source of the key.
        :paramtype key_source: str
        :keyword key_vault_properties: The properties of the KeyVault key.
        :paramtype key_vault_properties:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.KeyVaultKeyProperties]
        """
        super().__init__(**kwargs)
        self.key_source = key_source
        self.key_vault_properties = key_vault_properties
class EndpointHealthData(_serialization.Model):
    """Health information reported for a single routing endpoint.

    :ivar endpoint_id: Id of the endpoint.
    :vartype endpoint_id: str
    :ivar health_status: Health statuses have following meanings. The 'healthy' status shows that
     the endpoint is accepting messages as expected. The 'unhealthy' status shows that the endpoint
     is not accepting messages as expected and IoT Hub is retrying to send data to this endpoint.
     The status of an unhealthy endpoint will be updated to healthy when IoT Hub has established an
     eventually consistent state of health. The 'dead' status shows that the endpoint is not
     accepting messages, after IoT Hub retried sending messages for the retrial period. See IoT Hub
     metrics to identify errors and monitor issues with endpoints. The 'unknown' status shows that
     the IoT Hub has not established a connection with the endpoint. No messages have been delivered
     to or rejected from this endpoint. Known values are: "unknown", "healthy", "degraded",
     "unhealthy", and "dead".
    :vartype health_status: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.EndpointHealthStatus
    :ivar last_known_error: Last error obtained when a message failed to be delivered to iot hub.
    :vartype last_known_error: str
    :ivar last_known_error_time: Time at which the last known error occurred.
    :vartype last_known_error_time: ~datetime.datetime
    :ivar last_successful_send_attempt_time: Last time iot hub successfully sent a message to the
     endpoint.
    :vartype last_successful_send_attempt_time: ~datetime.datetime
    :ivar last_send_attempt_time: Last time iot hub tried to send a message to the endpoint.
    :vartype last_send_attempt_time: ~datetime.datetime
    """

    _attribute_map = {
        "endpoint_id": {"key": "endpointId", "type": "str"},
        "health_status": {"key": "healthStatus", "type": "str"},
        "last_known_error": {"key": "lastKnownError", "type": "str"},
        "last_known_error_time": {"key": "lastKnownErrorTime", "type": "rfc-1123"},
        "last_successful_send_attempt_time": {"key": "lastSuccessfulSendAttemptTime", "type": "rfc-1123"},
        "last_send_attempt_time": {"key": "lastSendAttemptTime", "type": "rfc-1123"},
    }

    def __init__(
        self,
        *,
        endpoint_id: Optional[str] = None,
        health_status: Optional[Union[str, "_models.EndpointHealthStatus"]] = None,
        last_known_error: Optional[str] = None,
        last_known_error_time: Optional[datetime.datetime] = None,
        last_successful_send_attempt_time: Optional[datetime.datetime] = None,
        last_send_attempt_time: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword endpoint_id: Id of the endpoint.
        :paramtype endpoint_id: str
        :keyword health_status: Endpoint health state. 'healthy' means the endpoint accepts
         messages as expected; 'unhealthy' means it does not and IoT Hub is retrying delivery
         (it returns to healthy once an eventually consistent healthy state is established);
         'dead' means the endpoint stopped accepting messages after the retrial period elapsed;
         'unknown' means no connection has been established and no messages were delivered to or
         rejected from the endpoint. See IoT Hub metrics to identify errors and monitor issues
         with endpoints. Known values are: "unknown", "healthy", "degraded", "unhealthy", and
         "dead".
        :paramtype health_status: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.EndpointHealthStatus
        :keyword last_known_error: Last error obtained when a message failed to be delivered to iot
         hub.
        :paramtype last_known_error: str
        :keyword last_known_error_time: Time at which the last known error occurred.
        :paramtype last_known_error_time: ~datetime.datetime
        :keyword last_successful_send_attempt_time: Last time iot hub successfully sent a message to
         the endpoint.
        :paramtype last_successful_send_attempt_time: ~datetime.datetime
        :keyword last_send_attempt_time: Last time iot hub tried to send a message to the endpoint.
        :paramtype last_send_attempt_time: ~datetime.datetime
        """
        super().__init__(**kwargs)
        self.endpoint_id = endpoint_id
        self.health_status = health_status
        # Error / timing diagnostics (all optional, rfc-1123 formatted on the wire).
        self.last_known_error = last_known_error
        self.last_known_error_time = last_known_error_time
        self.last_send_attempt_time = last_send_attempt_time
        self.last_successful_send_attempt_time = last_successful_send_attempt_time
class EndpointHealthDataListResult(_serialization.Model):
    """A page of EndpointHealthData objects plus a continuation link.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: JSON-serialized array of Endpoint health data.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.EndpointHealthData]
    :ivar next_link: Link to more results.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[EndpointHealthData]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.EndpointHealthData"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: JSON-serialized array of Endpoint health data.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.EndpointHealthData]
        """
        super().__init__(**kwargs)
        # next_link is read-only: the service supplies it when more pages exist.
        self.next_link = None
        self.value = value
class EnrichmentProperties(_serialization.Model):
    """An enrichment the IoT hub applies to messages delivered to the listed endpoints.

    All required parameters must be populated in order to send to Azure.

    :ivar key: The key or name for the enrichment property. Required.
    :vartype key: str
    :ivar value: The value for the enrichment property. Required.
    :vartype value: str
    :ivar endpoint_names: The list of endpoints for which the enrichment is applied to the message.
     Required.
    :vartype endpoint_names: list[str]
    """

    _validation = {
        "key": {"required": True},
        "value": {"required": True},
        "endpoint_names": {"required": True, "min_items": 1},
    }

    _attribute_map = {
        "key": {"key": "key", "type": "str"},
        "value": {"key": "value", "type": "str"},
        "endpoint_names": {"key": "endpointNames", "type": "[str]"},
    }

    def __init__(self, *, key: str, value: str, endpoint_names: List[str], **kwargs: Any) -> None:
        """
        :keyword key: The key or name for the enrichment property. Required.
        :paramtype key: str
        :keyword value: The value for the enrichment property. Required.
        :paramtype value: str
        :keyword endpoint_names: The list of endpoints for which the enrichment is applied to the
         message. Required.
        :paramtype endpoint_names: list[str]
        """
        super().__init__(**kwargs)
        # All three fields are mandatory; endpoint_names must contain at least one entry.
        self.endpoint_names = endpoint_names
        self.key = key
        self.value = value
class ErrorDetails(_serialization.Model):
    """Details describing a service error response.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar code: The error code.
    :vartype code: str
    :ivar http_status_code: The HTTP status code.
    :vartype http_status_code: str
    :ivar message: The error message.
    :vartype message: str
    :ivar details: The error details.
    :vartype details: str
    """

    _validation = {
        "code": {"readonly": True},
        "http_status_code": {"readonly": True},
        "message": {"readonly": True},
        "details": {"readonly": True},
    }

    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "http_status_code": {"key": "httpStatusCode", "type": "str"},
        "message": {"key": "message", "type": "str"},
        "details": {"key": "details", "type": "str"},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # Every field is server-populated; clients never set them.
        self.code = None
        self.details = None
        self.http_status_code = None
        self.message = None
class EventHubConsumerGroupBodyDescription(_serialization.Model):
    """Request body wrapper for an EventHub consumer group.

    All required parameters must be populated in order to send to Azure.

    :ivar properties: The EventHub consumer group name. Required.
    :vartype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.EventHubConsumerGroupName
    """

    _validation = {
        "properties": {"required": True},
    }

    _attribute_map = {
        "properties": {"key": "properties", "type": "EventHubConsumerGroupName"},
    }

    def __init__(self, *, properties: "_models.EventHubConsumerGroupName", **kwargs: Any) -> None:
        """
        :keyword properties: The EventHub consumer group name. Required.
        :paramtype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.EventHubConsumerGroupName
        """
        super().__init__(**kwargs)
        self.properties = properties
class EventHubConsumerGroupInfo(_serialization.Model):
    """Describes one Event Hub-compatible consumer group resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar properties: The tags.
    :vartype properties: dict[str, any]
    :ivar id: The Event Hub-compatible consumer group identifier.
    :vartype id: str
    :ivar name: The Event Hub-compatible consumer group name.
    :vartype name: str
    :ivar type: the resource type.
    :vartype type: str
    :ivar etag: The etag.
    :vartype etag: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "etag": {"readonly": True},
    }

    _attribute_map = {
        "properties": {"key": "properties", "type": "{object}"},
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "etag": {"key": "etag", "type": "str"},
    }

    def __init__(self, *, properties: Optional[Dict[str, Any]] = None, **kwargs: Any) -> None:
        """
        :keyword properties: The tags.
        :paramtype properties: dict[str, any]
        """
        super().__init__(**kwargs)
        # Identity fields are server-populated and read-only.
        self.id = None
        self.name = None
        self.type = None
        self.etag = None
        self.properties = properties
class EventHubConsumerGroupName(_serialization.Model):
    """Holds the name of an EventHub consumer group.

    All required parameters must be populated in order to send to Azure.

    :ivar name: EventHub consumer group name. Required.
    :vartype name: str
    """

    _validation = {
        "name": {"required": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
    }

    def __init__(self, *, name: str, **kwargs: Any) -> None:
        """
        :keyword name: EventHub consumer group name. Required.
        :paramtype name: str
        """
        super().__init__(**kwargs)
        self.name = name
class EventHubConsumerGroupsListResult(_serialization.Model):
    """A page of Event Hub-compatible consumer group names plus a continuation link.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: List of consumer groups objects.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.EventHubConsumerGroupInfo]
    :ivar next_link: The next link.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[EventHubConsumerGroupInfo]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.EventHubConsumerGroupInfo"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: List of consumer groups objects.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.EventHubConsumerGroupInfo]
        """
        super().__init__(**kwargs)
        # next_link is read-only: supplied by the service when more pages exist.
        self.next_link = None
        self.value = value
class EventHubProperties(_serialization.Model):
    """Properties of the provisioned Event Hub-compatible endpoint of the IoT hub.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar retention_time_in_days: The retention time for device-to-cloud messages in days. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages.
    :vartype retention_time_in_days: int
    :ivar partition_count: The number of partitions for receiving device-to-cloud messages in the
     Event Hub-compatible endpoint. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages.
    :vartype partition_count: int
    :ivar partition_ids: The partition ids in the Event Hub-compatible endpoint.
    :vartype partition_ids: list[str]
    :ivar path: The Event Hub-compatible name.
    :vartype path: str
    :ivar endpoint: The Event Hub-compatible endpoint.
    :vartype endpoint: str
    """

    _validation = {
        "partition_ids": {"readonly": True},
        "path": {"readonly": True},
        "endpoint": {"readonly": True},
    }

    _attribute_map = {
        "retention_time_in_days": {"key": "retentionTimeInDays", "type": "int"},
        "partition_count": {"key": "partitionCount", "type": "int"},
        "partition_ids": {"key": "partitionIds", "type": "[str]"},
        "path": {"key": "path", "type": "str"},
        "endpoint": {"key": "endpoint", "type": "str"},
    }

    def __init__(
        self, *, retention_time_in_days: Optional[int] = None, partition_count: Optional[int] = None, **kwargs: Any
    ) -> None:
        """
        :keyword retention_time_in_days: The retention time for device-to-cloud messages in days.
         See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages.
        :paramtype retention_time_in_days: int
        :keyword partition_count: The number of partitions for receiving device-to-cloud messages
         in the Event Hub-compatible endpoint. See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages.
        :paramtype partition_count: int
        """
        super().__init__(**kwargs)
        # Caller-settable capacity knobs.
        self.partition_count = partition_count
        self.retention_time_in_days = retention_time_in_days
        # Server-populated, read-only endpoint metadata.
        self.endpoint = None
        self.partition_ids = None
        self.path = None
class ExportDevicesRequest(_serialization.Model):
    """Parameters for a request to export all devices in the IoT hub.

    All required parameters must be populated in order to send to Azure.

    :ivar export_blob_container_uri: The export blob container URI. Required.
    :vartype export_blob_container_uri: str
    :ivar exclude_keys: The value indicating whether keys should be excluded during export.
     Required.
    :vartype exclude_keys: bool
    :ivar export_blob_name: The name of the blob that will be created in the provided output blob
     container. This blob will contain the exported device registry information for the IoT Hub.
    :vartype export_blob_name: str
    :ivar authentication_type: Specifies authentication type being used for connecting to the
     storage account. Known values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of storage endpoint for export devices.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    :ivar include_configurations: The value indicating whether configurations should be exported.
    :vartype include_configurations: bool
    :ivar configurations_blob_name: The name of the blob that will be created in the provided
     output blob container. This blob will contain the exported configurations for the Iot Hub.
    :vartype configurations_blob_name: str
    """

    _validation = {
        "export_blob_container_uri": {"required": True},
        "exclude_keys": {"required": True},
    }

    _attribute_map = {
        "export_blob_container_uri": {"key": "exportBlobContainerUri", "type": "str"},
        "exclude_keys": {"key": "excludeKeys", "type": "bool"},
        "export_blob_name": {"key": "exportBlobName", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
        "include_configurations": {"key": "includeConfigurations", "type": "bool"},
        "configurations_blob_name": {"key": "configurationsBlobName", "type": "str"},
    }

    def __init__(
        self,
        *,
        export_blob_container_uri: str,
        exclude_keys: bool,
        export_blob_name: Optional[str] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        include_configurations: Optional[bool] = None,
        configurations_blob_name: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword export_blob_container_uri: The export blob container URI. Required.
        :paramtype export_blob_container_uri: str
        :keyword exclude_keys: The value indicating whether keys should be excluded during export.
         Required.
        :paramtype exclude_keys: bool
        :keyword export_blob_name: The name of the blob that will be created in the provided output
         blob container. This blob will contain the exported device registry information for the IoT
         Hub.
        :paramtype export_blob_name: str
        :keyword authentication_type: Specifies authentication type being used for connecting to
         the storage account. Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of storage endpoint for export devices.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        :keyword include_configurations: The value indicating whether configurations should be
         exported.
        :paramtype include_configurations: bool
        :keyword configurations_blob_name: The name of the blob that will be created in the
         provided output blob container. This blob will contain the exported configurations for the
         Iot Hub.
        :paramtype configurations_blob_name: str
        """
        super().__init__(**kwargs)
        # Required destination + key-exclusion flag.
        self.export_blob_container_uri = export_blob_container_uri
        self.exclude_keys = exclude_keys
        # Optional storage-access settings.
        self.authentication_type = authentication_type
        self.identity = identity
        # Optional output naming / configuration-export settings.
        self.export_blob_name = export_blob_name
        self.include_configurations = include_configurations
        self.configurations_blob_name = configurations_blob_name
class FailoverInput(_serialization.Model):
    """Input specifying the target region for a manual hub failover.

    All required parameters must be populated in order to send to Azure.

    :ivar failover_region: Region the hub will be failed over to. Required.
    :vartype failover_region: str
    """

    _validation = {
        "failover_region": {"required": True},
    }

    _attribute_map = {
        "failover_region": {"key": "failoverRegion", "type": "str"},
    }

    def __init__(self, *, failover_region: str, **kwargs: Any) -> None:
        """
        :keyword failover_region: Region the hub will be failed over to. Required.
        :paramtype failover_region: str
        """
        super().__init__(**kwargs)
        self.failover_region = failover_region
class FallbackRouteProperties(_serialization.Model):
    """Properties of the fallback route, used by IoT Hub when routing messages to the fallback
    endpoint.

    All required parameters must be populated in order to send to Azure.

    :ivar name: The name of the route. The name can only include alphanumeric characters, periods,
     underscores, hyphens, has a maximum length of 64 characters, and must be unique.
    :vartype name: str
    :ivar source: The source to which the routing rule is to be applied to. For example,
     DeviceMessages. Required. Known values are: "Invalid", "DeviceMessages", "TwinChangeEvents",
     "DeviceLifecycleEvents", "DeviceJobLifecycleEvents", "DigitalTwinChangeEvents",
     "DeviceConnectionStateEvents", and "MqttBrokerMessages".
    :vartype source: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingSource
    :ivar condition: The condition which is evaluated in order to apply the fallback route. If the
     condition is not provided it will evaluate to true by default. For grammar, See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language.
    :vartype condition: str
    :ivar endpoint_names: The list of endpoints to which the messages that satisfy the condition
     are routed to. Currently only 1 endpoint is allowed. Required.
    :vartype endpoint_names: list[str]
    :ivar is_enabled: Used to specify whether the fallback route is enabled. Required.
    :vartype is_enabled: bool
    """

    _validation = {
        "source": {"required": True},
        "endpoint_names": {"required": True, "max_items": 1, "min_items": 1},
        "is_enabled": {"required": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "source": {"key": "source", "type": "str"},
        "condition": {"key": "condition", "type": "str"},
        "endpoint_names": {"key": "endpointNames", "type": "[str]"},
        "is_enabled": {"key": "isEnabled", "type": "bool"},
    }

    def __init__(
        self,
        *,
        source: Union[str, "_models.RoutingSource"],
        endpoint_names: List[str],
        is_enabled: bool,
        name: Optional[str] = None,
        condition: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword name: The name of the route. The name can only include alphanumeric characters,
         periods, underscores, hyphens, has a maximum length of 64 characters, and must be unique.
        :paramtype name: str
        :keyword source: The source to which the routing rule is to be applied to. For example,
         DeviceMessages. Required. Known values are: "Invalid", "DeviceMessages",
         "TwinChangeEvents", "DeviceLifecycleEvents", "DeviceJobLifecycleEvents",
         "DigitalTwinChangeEvents", "DeviceConnectionStateEvents", and "MqttBrokerMessages".
        :paramtype source: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingSource
        :keyword condition: The condition which is evaluated in order to apply the fallback route.
         If the condition is not provided it will evaluate to true by default. For grammar, See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language.
        :paramtype condition: str
        :keyword endpoint_names: The list of endpoints to which the messages that satisfy the
         condition are routed to. Currently only 1 endpoint is allowed. Required.
        :paramtype endpoint_names: list[str]
        :keyword is_enabled: Used to specify whether the fallback route is enabled. Required.
        :paramtype is_enabled: bool
        """
        super().__init__(**kwargs)
        # Required routing fields.
        self.source = source
        self.endpoint_names = endpoint_names  # validation enforces exactly one endpoint
        self.is_enabled = is_enabled
        # Optional identification / filtering fields.
        self.name = name
        self.condition = condition  # defaults to an always-true condition when omitted
class FeedbackProperties(_serialization.Model):
    """Settings for the cloud-to-device message feedback queue.

    :ivar lock_duration_as_iso8601: The lock duration for the feedback queue. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
    :vartype lock_duration_as_iso8601: ~datetime.timedelta
    :ivar ttl_as_iso8601: The period of time for which a message is available to consume before it
     is expired by the IoT hub. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
    :vartype ttl_as_iso8601: ~datetime.timedelta
    :ivar max_delivery_count: The number of times the IoT hub attempts to deliver a message on the
     feedback queue. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
    :vartype max_delivery_count: int
    """

    _validation = {
        "max_delivery_count": {"maximum": 100, "minimum": 1},
    }

    _attribute_map = {
        "lock_duration_as_iso8601": {"key": "lockDurationAsIso8601", "type": "duration"},
        "ttl_as_iso8601": {"key": "ttlAsIso8601", "type": "duration"},
        "max_delivery_count": {"key": "maxDeliveryCount", "type": "int"},
    }

    def __init__(
        self,
        *,
        lock_duration_as_iso8601: Optional[datetime.timedelta] = None,
        ttl_as_iso8601: Optional[datetime.timedelta] = None,
        max_delivery_count: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword lock_duration_as_iso8601: The lock duration for the feedback queue. See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
        :paramtype lock_duration_as_iso8601: ~datetime.timedelta
        :keyword ttl_as_iso8601: The period of time for which a message is available to consume
         before it is expired by the IoT hub. See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
        :paramtype ttl_as_iso8601: ~datetime.timedelta
        :keyword max_delivery_count: The number of times the IoT hub attempts to deliver a message
         on the feedback queue (validated to the range 1-100). See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.
        :paramtype max_delivery_count: int
        """
        super().__init__(**kwargs)
        self.max_delivery_count = max_delivery_count
        self.lock_duration_as_iso8601 = lock_duration_as_iso8601
        self.ttl_as_iso8601 = ttl_as_iso8601
class GroupIdInformation(_serialization.Model):
    """Group information used when creating a private endpoint on an IotHub.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar properties: The properties for a group information object. Required.
    :vartype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.GroupIdInformationProperties
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "properties": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "properties": {"key": "properties", "type": "GroupIdInformationProperties"},
    }

    def __init__(self, *, properties: "_models.GroupIdInformationProperties", **kwargs: Any) -> None:
        """
        :keyword properties: The properties for a group information object. Required.
        :paramtype properties:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.GroupIdInformationProperties
        """
        super().__init__(**kwargs)
        # ARM identity fields are server-populated and read-only.
        self.id = None
        self.name = None
        self.type = None
        self.properties = properties
class GroupIdInformationProperties(_serialization.Model):
    """Payload of a group information object.

    :ivar group_id: The group id.
    :vartype group_id: str
    :ivar required_members: The required members for a specific group id.
    :vartype required_members: list[str]
    :ivar required_zone_names: The required DNS zones for a specific group id.
    :vartype required_zone_names: list[str]
    """

    _attribute_map = {
        "group_id": {"key": "groupId", "type": "str"},
        "required_members": {"key": "requiredMembers", "type": "[str]"},
        "required_zone_names": {"key": "requiredZoneNames", "type": "[str]"},
    }

    def __init__(
        self,
        *,
        group_id: Optional[str] = None,
        required_members: Optional[List[str]] = None,
        required_zone_names: Optional[List[str]] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword group_id: The group id.
        :paramtype group_id: str
        :keyword required_members: The required members for a specific group id.
        :paramtype required_members: list[str]
        :keyword required_zone_names: The required DNS zones for a specific group id.
        :paramtype required_zone_names: list[str]
        """
        super().__init__(**kwargs)
        # All fields are optional pass-through values.
        self.group_id = group_id
        self.required_zone_names = required_zone_names
        self.required_members = required_members
class ImportDevicesRequest(_serialization.Model):
    """Parameters for a request to import all devices into the hub.

    All required parameters must be populated in order to send to Azure.

    :ivar input_blob_container_uri: The input blob container URI. Required.
    :vartype input_blob_container_uri: str
    :ivar output_blob_container_uri: The output blob container URI. Required.
    :vartype output_blob_container_uri: str
    :ivar input_blob_name: The blob name to be used when importing from the provided input blob
     container.
    :vartype input_blob_name: str
    :ivar output_blob_name: The blob name to use for storing the status of the import job.
    :vartype output_blob_name: str
    :ivar authentication_type: Specifies authentication type being used for connecting to the
     storage account. Known values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of storage endpoint for import devices.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    :ivar include_configurations: The value indicating whether configurations should be imported.
    :vartype include_configurations: bool
    :ivar configurations_blob_name: The blob name to be used when importing configurations from the
     provided input blob container.
    :vartype configurations_blob_name: str
    """

    _validation = {
        "input_blob_container_uri": {"required": True},
        "output_blob_container_uri": {"required": True},
    }

    _attribute_map = {
        "input_blob_container_uri": {"key": "inputBlobContainerUri", "type": "str"},
        "output_blob_container_uri": {"key": "outputBlobContainerUri", "type": "str"},
        "input_blob_name": {"key": "inputBlobName", "type": "str"},
        "output_blob_name": {"key": "outputBlobName", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
        "include_configurations": {"key": "includeConfigurations", "type": "bool"},
        "configurations_blob_name": {"key": "configurationsBlobName", "type": "str"},
    }

    def __init__(
        self,
        *,
        input_blob_container_uri: str,
        output_blob_container_uri: str,
        input_blob_name: Optional[str] = None,
        output_blob_name: Optional[str] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        include_configurations: Optional[bool] = None,
        configurations_blob_name: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword input_blob_container_uri: The input blob container URI. Required.
        :paramtype input_blob_container_uri: str
        :keyword output_blob_container_uri: The output blob container URI. Required.
        :paramtype output_blob_container_uri: str
        :keyword input_blob_name: The blob name to be used when importing from the provided input
         blob container.
        :paramtype input_blob_name: str
        :keyword output_blob_name: The blob name to use for storing the status of the import job.
        :paramtype output_blob_name: str
        :keyword authentication_type: Specifies authentication type being used for connecting to
         the storage account. Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of storage endpoint for import devices.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        :keyword include_configurations: The value indicating whether configurations should be
         imported.
        :paramtype include_configurations: bool
        :keyword configurations_blob_name: The blob name to be used when importing configurations
         from the provided input blob container.
        :paramtype configurations_blob_name: str
        """
        super().__init__(**kwargs)
        # Required blob container endpoints.
        self.input_blob_container_uri = input_blob_container_uri
        self.output_blob_container_uri = output_blob_container_uri
        # Optional storage-access settings.
        self.authentication_type = authentication_type
        self.identity = identity
        # Optional blob naming / configuration-import settings.
        self.input_blob_name = input_blob_name
        self.output_blob_name = output_blob_name
        self.include_configurations = include_configurations
        self.configurations_blob_name = configurations_blob_name
class IotHubCapacity(_serialization.Model):
    """Capacity (unit count) information for an IoT Hub SKU.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar minimum: The minimum number of units.
    :vartype minimum: int
    :ivar maximum: The maximum number of units.
    :vartype maximum: int
    :ivar default: The default number of units.
    :vartype default: int
    :ivar scale_type: The type of the scaling enabled. Known values are: "Automatic", "Manual", and
     "None".
    :vartype scale_type: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubScaleType
    """

    _validation = {
        "minimum": {"readonly": True, "maximum": 1, "minimum": 1},
        "maximum": {"readonly": True},
        "default": {"readonly": True},
        "scale_type": {"readonly": True},
    }

    _attribute_map = {
        "minimum": {"key": "minimum", "type": "int"},
        "maximum": {"key": "maximum", "type": "int"},
        "default": {"key": "default", "type": "int"},
        "scale_type": {"key": "scaleType", "type": "str"},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # Every field is server-populated; clients never set them.
        self.default = None
        self.maximum = None
        self.minimum = None
        self.scale_type = None
class Resource(_serialization.Model):
    """Common properties shared by every Azure resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar location: The resource location. Required.
    :vartype location: str
    :ivar tags: The resource tags.
    :vartype tags: dict[str, str]
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True, "pattern": r"^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$"},
        "type": {"readonly": True},
        "location": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
        """
        :keyword location: The resource location. Required.
        :paramtype location: str
        :keyword tags: The resource tags.
        :paramtype tags: dict[str, str]
        """
        super().__init__(**kwargs)
        # Caller-supplied values first; the read-only fields are server-populated.
        self.location = location
        self.tags = tags
        self.id = self.name = self.type = None
class IotHubDescription(Resource):
    """Representation of a single IoT hub resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar location: The resource location. Required.
    :vartype location: str
    :ivar tags: The resource tags.
    :vartype tags: dict[str, str]
    :ivar etag: The Etag field is *not* required. If it is provided in the response body, it must
     also be provided as a header per the normal ETag convention.
    :vartype etag: str
    :ivar properties: IotHub properties.
    :vartype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubProperties
    :ivar sku: IotHub SKU info. Required.
    :vartype sku: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSkuInfo
    :ivar identity: The managed identities for the IotHub.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ArmIdentity
    :ivar system_data: The system meta data relating to this resource.
    :vartype system_data: ~azure.mgmt.iothub.v2022_04_30_preview.models.SystemData
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True, "pattern": r"^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$"},
        "type": {"readonly": True},
        "location": {"required": True},
        "sku": {"required": True},
        "system_data": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "etag": {"key": "etag", "type": "str"},
        "properties": {"key": "properties", "type": "IotHubProperties"},
        "sku": {"key": "sku", "type": "IotHubSkuInfo"},
        "identity": {"key": "identity", "type": "ArmIdentity"},
        "system_data": {"key": "systemData", "type": "SystemData"},
    }

    def __init__(
        self,
        *,
        location: str,
        sku: "_models.IotHubSkuInfo",
        tags: Optional[Dict[str, str]] = None,
        etag: Optional[str] = None,
        properties: Optional["_models.IotHubProperties"] = None,
        identity: Optional["_models.ArmIdentity"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword location: The resource location. Required.
        :paramtype location: str
        :keyword tags: The resource tags.
        :paramtype tags: dict[str, str]
        :keyword etag: The Etag field is *not* required. If it is provided in the response body, it
         must also be provided as a header per the normal ETag convention.
        :paramtype etag: str
        :keyword properties: IotHub properties.
        :paramtype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubProperties
        :keyword sku: IotHub SKU info. Required.
        :paramtype sku: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSkuInfo
        :keyword identity: The managed identities for the IotHub.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ArmIdentity
        """
        # The base class owns location/tags and the common readonly fields.
        super().__init__(location=location, tags=tags, **kwargs)
        self.sku = sku
        self.etag = etag
        self.properties = properties
        self.identity = identity
        # Read-only; populated by the service.
        self.system_data = None
class IotHubDescriptionListResult(_serialization.Model):
    """A page of IotHubDescription objects together with a link to the next page.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The array of IotHubDescription objects.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubDescription]
    :ivar next_link: The next link.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[IotHubDescription]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.IotHubDescription"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: The array of IotHubDescription objects.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubDescription]
        """
        super().__init__(**kwargs)
        # next_link is read-only; the service fills it in for paging.
        self.next_link = None
        self.value = value
class IotHubLocationDescription(_serialization.Model):
    """Public representation of one of the locations where a resource is provisioned.

    :ivar location: The name of the Azure region.
    :vartype location: str
    :ivar role: The role of the region, can be either primary or secondary. The primary region is
     where the IoT hub is currently provisioned. The secondary region is the Azure disaster recovery
     (DR) paired region and also the region where the IoT hub can failover to. Known values are:
     "primary" and "secondary".
    :vartype role: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubReplicaRoleType
    """

    _attribute_map = {
        "location": {"key": "location", "type": "str"},
        "role": {"key": "role", "type": "str"},
    }

    def __init__(
        self,
        *,
        location: Optional[str] = None,
        role: Optional[Union[str, "_models.IotHubReplicaRoleType"]] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword location: The name of the Azure region.
        :paramtype location: str
        :keyword role: The role of the region, can be either primary or secondary. The primary region
         is where the IoT hub is currently provisioned. The secondary region is the Azure disaster
         recovery (DR) paired region and also the region where the IoT hub can failover to. Known values
         are: "primary" and "secondary".
        :paramtype role: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubReplicaRoleType
        """
        super().__init__(**kwargs)
        # Both fields are plain pass-through values.
        self.location, self.role = location, role
class IotHubNameAvailabilityInfo(_serialization.Model):
    """Result of an IoT hub name-availability check.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name_available: The value which indicates whether the provided name is available.
    :vartype name_available: bool
    :ivar reason: The reason for unavailability. Known values are: "Invalid" and "AlreadyExists".
    :vartype reason: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubNameUnavailabilityReason
    :ivar message: The detailed reason message.
    :vartype message: str
    """

    _validation = {
        "name_available": {"readonly": True},
        "reason": {"readonly": True},
    }

    _attribute_map = {
        "name_available": {"key": "nameAvailable", "type": "bool"},
        "reason": {"key": "reason", "type": "str"},
        "message": {"key": "message", "type": "str"},
    }

    def __init__(self, *, message: Optional[str] = None, **kwargs: Any) -> None:
        """
        :keyword message: The detailed reason message.
        :paramtype message: str
        """
        super().__init__(**kwargs)
        # Only message is settable; the other two are read-only, server-populated.
        self.message = message
        self.name_available = None
        self.reason = None
class IotHubProperties(_serialization.Model):  # pylint: disable=too-many-instance-attributes
    """The properties of an IoT hub.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar authorization_policies: The shared access policies you can use to secure a connection to
     the IoT hub.
    :vartype authorization_policies:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.SharedAccessSignatureAuthorizationRule]
    :ivar disable_local_auth: If true, SAS tokens with Iot hub scoped SAS keys cannot be used for
     authentication.
    :vartype disable_local_auth: bool
    :ivar disable_device_sas: If true, all device(including Edge devices but excluding modules)
     scoped SAS keys cannot be used for authentication.
    :vartype disable_device_sas: bool
    :ivar disable_module_sas: If true, all module scoped SAS keys cannot be used for
     authentication.
    :vartype disable_module_sas: bool
    :ivar restrict_outbound_network_access: If true, egress from IotHub will be restricted to only
     the allowed FQDNs that are configured via allowedFqdnList.
    :vartype restrict_outbound_network_access: bool
    :ivar allowed_fqdn_list: List of allowed FQDNs(Fully Qualified Domain Name) for egress from Iot
     Hub.
    :vartype allowed_fqdn_list: list[str]
    :ivar public_network_access: Whether requests from Public Network are allowed. Known values
     are: "Enabled" and "Disabled".
    :vartype public_network_access: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.PublicNetworkAccess
    :ivar ip_filter_rules: The IP filter rules.
    :vartype ip_filter_rules: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IpFilterRule]
    :ivar network_rule_sets: Network Rule Set Properties of IotHub.
    :vartype network_rule_sets:
     ~azure.mgmt.iothub.v2022_04_30_preview.models.NetworkRuleSetProperties
    :ivar min_tls_version: Specifies the minimum TLS version to support for this hub. Can be set to
     "1.2" to have clients that use a TLS version below 1.2 to be rejected.
    :vartype min_tls_version: str
    :ivar private_endpoint_connections: Private endpoint connections created on this IotHub.
    :vartype private_endpoint_connections:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateEndpointConnection]
    :ivar provisioning_state: The provisioning state.
    :vartype provisioning_state: str
    :ivar state: The hub state.
    :vartype state: str
    :ivar host_name: The name of the host.
    :vartype host_name: str
    :ivar event_hub_endpoints: The Event Hub-compatible endpoint properties. The only possible keys
     to this dictionary is events. This key has to be present in the dictionary while making create
     or update calls for the IoT hub.
    :vartype event_hub_endpoints: dict[str,
     ~azure.mgmt.iothub.v2022_04_30_preview.models.EventHubProperties]
    :ivar routing: The routing related properties of the IoT hub. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging.
    :vartype routing: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingProperties
    :ivar storage_endpoints: The list of Azure Storage endpoints where you can upload files.
     Currently you can configure only one Azure Storage account and that MUST have its key as
     $default. Specifying more than one storage account causes an error to be thrown. Not specifying
     a value for this property when the enableFileUploadNotifications property is set to True,
     causes an error to be thrown.
    :vartype storage_endpoints: dict[str,
     ~azure.mgmt.iothub.v2022_04_30_preview.models.StorageEndpointProperties]
    :ivar messaging_endpoints: The messaging endpoint properties for the file upload notification
     queue.
    :vartype messaging_endpoints: dict[str,
     ~azure.mgmt.iothub.v2022_04_30_preview.models.MessagingEndpointProperties]
    :ivar enable_file_upload_notifications: If True, file upload notifications are enabled.
    :vartype enable_file_upload_notifications: bool
    :ivar cloud_to_device: The IoT hub cloud-to-device messaging properties.
    :vartype cloud_to_device: ~azure.mgmt.iothub.v2022_04_30_preview.models.CloudToDeviceProperties
    :ivar comments: IoT hub comments.
    :vartype comments: str
    :ivar device_streams: The device streams properties of iothub.
    :vartype device_streams:
     ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubPropertiesDeviceStreams
    :ivar features: The capabilities and features enabled for the IoT hub. Known values are: "None"
     and "DeviceManagement".
    :vartype features: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.Capabilities
    :ivar encryption: The encryption properties for the IoT hub.
    :vartype encryption:
     ~azure.mgmt.iothub.v2022_04_30_preview.models.EncryptionPropertiesDescription
    :ivar locations: Primary and secondary location for iot hub.
    :vartype locations:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubLocationDescription]
    :ivar enable_data_residency: This property when set to true, will enable data residency, thus,
     disabling disaster recovery.
    :vartype enable_data_residency: bool
    :ivar root_certificate: This property store root certificate related information.
    :vartype root_certificate:
     ~azure.mgmt.iothub.v2022_04_30_preview.models.RootCertificateProperties
    """

    # Fields marked readonly are populated by the service; values supplied by the
    # caller for them are ignored when the model is serialized for a request.
    _validation = {
        "provisioning_state": {"readonly": True},
        "state": {"readonly": True},
        "host_name": {"readonly": True},
        "locations": {"readonly": True},
    }

    # Maps each Python attribute to its REST wire name and serialization type.
    _attribute_map = {
        "authorization_policies": {"key": "authorizationPolicies", "type": "[SharedAccessSignatureAuthorizationRule]"},
        "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"},
        "disable_device_sas": {"key": "disableDeviceSAS", "type": "bool"},
        "disable_module_sas": {"key": "disableModuleSAS", "type": "bool"},
        "restrict_outbound_network_access": {"key": "restrictOutboundNetworkAccess", "type": "bool"},
        "allowed_fqdn_list": {"key": "allowedFqdnList", "type": "[str]"},
        "public_network_access": {"key": "publicNetworkAccess", "type": "str"},
        "ip_filter_rules": {"key": "ipFilterRules", "type": "[IpFilterRule]"},
        "network_rule_sets": {"key": "networkRuleSets", "type": "NetworkRuleSetProperties"},
        "min_tls_version": {"key": "minTlsVersion", "type": "str"},
        "private_endpoint_connections": {"key": "privateEndpointConnections", "type": "[PrivateEndpointConnection]"},
        "provisioning_state": {"key": "provisioningState", "type": "str"},
        "state": {"key": "state", "type": "str"},
        "host_name": {"key": "hostName", "type": "str"},
        "event_hub_endpoints": {"key": "eventHubEndpoints", "type": "{EventHubProperties}"},
        "routing": {"key": "routing", "type": "RoutingProperties"},
        "storage_endpoints": {"key": "storageEndpoints", "type": "{StorageEndpointProperties}"},
        "messaging_endpoints": {"key": "messagingEndpoints", "type": "{MessagingEndpointProperties}"},
        "enable_file_upload_notifications": {"key": "enableFileUploadNotifications", "type": "bool"},
        "cloud_to_device": {"key": "cloudToDevice", "type": "CloudToDeviceProperties"},
        "comments": {"key": "comments", "type": "str"},
        "device_streams": {"key": "deviceStreams", "type": "IotHubPropertiesDeviceStreams"},
        "features": {"key": "features", "type": "str"},
        "encryption": {"key": "encryption", "type": "EncryptionPropertiesDescription"},
        "locations": {"key": "locations", "type": "[IotHubLocationDescription]"},
        "enable_data_residency": {"key": "enableDataResidency", "type": "bool"},
        "root_certificate": {"key": "rootCertificate", "type": "RootCertificateProperties"},
    }

    def __init__(  # pylint: disable=too-many-locals
        self,
        *,
        authorization_policies: Optional[List["_models.SharedAccessSignatureAuthorizationRule"]] = None,
        disable_local_auth: Optional[bool] = None,
        disable_device_sas: Optional[bool] = None,
        disable_module_sas: Optional[bool] = None,
        restrict_outbound_network_access: Optional[bool] = None,
        allowed_fqdn_list: Optional[List[str]] = None,
        public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
        ip_filter_rules: Optional[List["_models.IpFilterRule"]] = None,
        network_rule_sets: Optional["_models.NetworkRuleSetProperties"] = None,
        min_tls_version: Optional[str] = None,
        private_endpoint_connections: Optional[List["_models.PrivateEndpointConnection"]] = None,
        event_hub_endpoints: Optional[Dict[str, "_models.EventHubProperties"]] = None,
        routing: Optional["_models.RoutingProperties"] = None,
        storage_endpoints: Optional[Dict[str, "_models.StorageEndpointProperties"]] = None,
        messaging_endpoints: Optional[Dict[str, "_models.MessagingEndpointProperties"]] = None,
        enable_file_upload_notifications: Optional[bool] = None,
        cloud_to_device: Optional["_models.CloudToDeviceProperties"] = None,
        comments: Optional[str] = None,
        device_streams: Optional["_models.IotHubPropertiesDeviceStreams"] = None,
        features: Optional[Union[str, "_models.Capabilities"]] = None,
        encryption: Optional["_models.EncryptionPropertiesDescription"] = None,
        enable_data_residency: Optional[bool] = None,
        root_certificate: Optional["_models.RootCertificateProperties"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword authorization_policies: The shared access policies you can use to secure a connection
         to the IoT hub.
        :paramtype authorization_policies:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.SharedAccessSignatureAuthorizationRule]
        :keyword disable_local_auth: If true, SAS tokens with Iot hub scoped SAS keys cannot be used
         for authentication.
        :paramtype disable_local_auth: bool
        :keyword disable_device_sas: If true, all device(including Edge devices but excluding modules)
         scoped SAS keys cannot be used for authentication.
        :paramtype disable_device_sas: bool
        :keyword disable_module_sas: If true, all module scoped SAS keys cannot be used for
         authentication.
        :paramtype disable_module_sas: bool
        :keyword restrict_outbound_network_access: If true, egress from IotHub will be restricted to
         only the allowed FQDNs that are configured via allowedFqdnList.
        :paramtype restrict_outbound_network_access: bool
        :keyword allowed_fqdn_list: List of allowed FQDNs(Fully Qualified Domain Name) for egress from
         Iot Hub.
        :paramtype allowed_fqdn_list: list[str]
        :keyword public_network_access: Whether requests from Public Network are allowed. Known values
         are: "Enabled" and "Disabled".
        :paramtype public_network_access: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.PublicNetworkAccess
        :keyword ip_filter_rules: The IP filter rules.
        :paramtype ip_filter_rules: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IpFilterRule]
        :keyword network_rule_sets: Network Rule Set Properties of IotHub.
        :paramtype network_rule_sets:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.NetworkRuleSetProperties
        :keyword min_tls_version: Specifies the minimum TLS version to support for this hub. Can be set
         to "1.2" to have clients that use a TLS version below 1.2 to be rejected.
        :paramtype min_tls_version: str
        :keyword private_endpoint_connections: Private endpoint connections created on this IotHub.
        :paramtype private_endpoint_connections:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateEndpointConnection]
        :keyword event_hub_endpoints: The Event Hub-compatible endpoint properties. The only possible
         keys to this dictionary is events. This key has to be present in the dictionary while making
         create or update calls for the IoT hub.
        :paramtype event_hub_endpoints: dict[str,
         ~azure.mgmt.iothub.v2022_04_30_preview.models.EventHubProperties]
        :keyword routing: The routing related properties of the IoT hub. See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging.
        :paramtype routing: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingProperties
        :keyword storage_endpoints: The list of Azure Storage endpoints where you can upload files.
         Currently you can configure only one Azure Storage account and that MUST have its key as
         $default. Specifying more than one storage account causes an error to be thrown. Not specifying
         a value for this property when the enableFileUploadNotifications property is set to True,
         causes an error to be thrown.
        :paramtype storage_endpoints: dict[str,
         ~azure.mgmt.iothub.v2022_04_30_preview.models.StorageEndpointProperties]
        :keyword messaging_endpoints: The messaging endpoint properties for the file upload
         notification queue.
        :paramtype messaging_endpoints: dict[str,
         ~azure.mgmt.iothub.v2022_04_30_preview.models.MessagingEndpointProperties]
        :keyword enable_file_upload_notifications: If True, file upload notifications are enabled.
        :paramtype enable_file_upload_notifications: bool
        :keyword cloud_to_device: The IoT hub cloud-to-device messaging properties.
        :paramtype cloud_to_device:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.CloudToDeviceProperties
        :keyword comments: IoT hub comments.
        :paramtype comments: str
        :keyword device_streams: The device streams properties of iothub.
        :paramtype device_streams:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubPropertiesDeviceStreams
        :keyword features: The capabilities and features enabled for the IoT hub. Known values are:
         "None" and "DeviceManagement".
        :paramtype features: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.Capabilities
        :keyword encryption: The encryption properties for the IoT hub.
        :paramtype encryption:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.EncryptionPropertiesDescription
        :keyword enable_data_residency: This property when set to true, will enable data residency,
         thus, disabling disaster recovery.
        :paramtype enable_data_residency: bool
        :keyword root_certificate: This property store root certificate related information.
        :paramtype root_certificate:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.RootCertificateProperties
        """
        super().__init__(**kwargs)
        self.authorization_policies = authorization_policies
        self.disable_local_auth = disable_local_auth
        self.disable_device_sas = disable_device_sas
        self.disable_module_sas = disable_module_sas
        self.restrict_outbound_network_access = restrict_outbound_network_access
        self.allowed_fqdn_list = allowed_fqdn_list
        self.public_network_access = public_network_access
        self.ip_filter_rules = ip_filter_rules
        self.network_rule_sets = network_rule_sets
        self.min_tls_version = min_tls_version
        self.private_endpoint_connections = private_endpoint_connections
        # Read-only fields (see _validation); the service fills these in.
        self.provisioning_state = None
        self.state = None
        self.host_name = None
        self.event_hub_endpoints = event_hub_endpoints
        self.routing = routing
        self.storage_endpoints = storage_endpoints
        self.messaging_endpoints = messaging_endpoints
        self.enable_file_upload_notifications = enable_file_upload_notifications
        self.cloud_to_device = cloud_to_device
        self.comments = comments
        self.device_streams = device_streams
        self.features = features
        self.encryption = encryption
        # Read-only; populated by the service.
        self.locations = None
        self.enable_data_residency = enable_data_residency
        self.root_certificate = root_certificate
class IotHubPropertiesDeviceStreams(_serialization.Model):
    """Device streams properties of an IoT hub.

    :ivar streaming_endpoints: List of Device Streams Endpoints.
    :vartype streaming_endpoints: list[str]
    """

    _attribute_map = {
        "streaming_endpoints": {"key": "streamingEndpoints", "type": "[str]"},
    }

    def __init__(self, *, streaming_endpoints: Optional[List[str]] = None, **kwargs: Any) -> None:
        """
        :keyword streaming_endpoints: List of Device Streams Endpoints.
        :paramtype streaming_endpoints: list[str]
        """
        super().__init__(**kwargs)
        # Single pass-through field; no server-populated attributes here.
        self.streaming_endpoints = streaming_endpoints
class IotHubQuotaMetricInfo(_serialization.Model):
    """Properties of a single quota metric.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name: The name of the quota metric.
    :vartype name: str
    :ivar current_value: The current value for the quota metric.
    :vartype current_value: int
    :ivar max_value: The maximum value of the quota metric.
    :vartype max_value: int
    """

    _validation = {
        "name": {"readonly": True},
        "current_value": {"readonly": True},
        "max_value": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "current_value": {"key": "currentValue", "type": "int"},
        "max_value": {"key": "maxValue", "type": "int"},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # All three fields are read-only and filled in by the service.
        for field_name in ("name", "current_value", "max_value"):
            setattr(self, field_name, None)
class IotHubQuotaMetricInfoListResult(_serialization.Model):
    """A page of IotHubQuotaMetricInfo objects together with a link to the next page.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The array of quota metrics objects.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubQuotaMetricInfo]
    :ivar next_link: The next link.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[IotHubQuotaMetricInfo]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.IotHubQuotaMetricInfo"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: The array of quota metrics objects.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubQuotaMetricInfo]
        """
        super().__init__(**kwargs)
        # next_link is read-only; the service fills it in for paging.
        self.next_link = None
        self.value = value
class IotHubSkuDescription(_serialization.Model):
    """Description of an available SKU.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar resource_type: The type of the resource.
    :vartype resource_type: str
    :ivar sku: The type of the resource. Required.
    :vartype sku: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSkuInfo
    :ivar capacity: IotHub capacity. Required.
    :vartype capacity: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubCapacity
    """

    _validation = {
        "resource_type": {"readonly": True},
        "sku": {"required": True},
        "capacity": {"required": True},
    }

    _attribute_map = {
        "resource_type": {"key": "resourceType", "type": "str"},
        "sku": {"key": "sku", "type": "IotHubSkuInfo"},
        "capacity": {"key": "capacity", "type": "IotHubCapacity"},
    }

    def __init__(self, *, sku: "_models.IotHubSkuInfo", capacity: "_models.IotHubCapacity", **kwargs: Any) -> None:
        """
        :keyword sku: The type of the resource. Required.
        :paramtype sku: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSkuInfo
        :keyword capacity: IotHub capacity. Required.
        :paramtype capacity: ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubCapacity
        """
        super().__init__(**kwargs)
        # Required inputs first; resource_type is read-only and server-populated.
        self.sku = sku
        self.capacity = capacity
        self.resource_type = None
class IotHubSkuDescriptionListResult(_serialization.Model):
    """A page of IotHubSkuDescription objects together with a link to the next page.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The array of IotHubSkuDescription.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSkuDescription]
    :ivar next_link: The next link.
    :vartype next_link: str
    """

    _validation = {
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[IotHubSkuDescription]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.IotHubSkuDescription"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: The array of IotHubSkuDescription.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSkuDescription]
        """
        super().__init__(**kwargs)
        # next_link is read-only; the service fills it in for paging.
        self.next_link = None
        self.value = value
class IotHubSkuInfo(_serialization.Model):
    """SKU information for an IoT hub.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar name: The name of the SKU. Required. Known values are: "F1", "S1", "S2", "S3", "B1",
     "B2", and "B3".
    :vartype name: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSku
    :ivar tier: The billing tier for the IoT hub. Known values are: "Free", "Standard", and
     "Basic".
    :vartype tier: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSkuTier
    :ivar capacity: The number of provisioned IoT Hub units. See:
     https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits.
    :vartype capacity: int
    """

    _validation = {
        "name": {"required": True},
        "tier": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "tier": {"key": "tier", "type": "str"},
        "capacity": {"key": "capacity", "type": "int"},
    }

    def __init__(self, *, name: Union[str, "_models.IotHubSku"], capacity: Optional[int] = None, **kwargs: Any) -> None:
        """
        :keyword name: The name of the SKU. Required. Known values are: "F1", "S1", "S2", "S3", "B1",
         "B2", and "B3".
        :paramtype name: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IotHubSku
        :keyword capacity: The number of provisioned IoT Hub units. See:
         https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits.
        :paramtype capacity: int
        """
        super().__init__(**kwargs)
        # tier is read-only and server-populated; only name/capacity are settable.
        self.tier = None
        self.name = name
        self.capacity = capacity
class IpFilterRule(_serialization.Model):
    """A single IP filter rule for the IoT hub.

    All required parameters must be populated in order to send to Azure.

    :ivar filter_name: The name of the IP filter rule. Required.
    :vartype filter_name: str
    :ivar action: The desired action for requests captured by this rule. Required. Known values
     are: "Accept" and "Reject".
    :vartype action: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IpFilterActionType
    :ivar ip_mask: A string that contains the IP address range in CIDR notation for the rule.
     Required.
    :vartype ip_mask: str
    """

    _validation = {
        "filter_name": {"required": True},
        "action": {"required": True},
        "ip_mask": {"required": True},
    }

    _attribute_map = {
        "filter_name": {"key": "filterName", "type": "str"},
        "action": {"key": "action", "type": "str"},
        "ip_mask": {"key": "ipMask", "type": "str"},
    }

    def __init__(
        self, *, filter_name: str, action: Union[str, "_models.IpFilterActionType"], ip_mask: str, **kwargs: Any
    ) -> None:
        """
        :keyword filter_name: The name of the IP filter rule. Required.
        :paramtype filter_name: str
        :keyword action: The desired action for requests captured by this rule. Required. Known values
         are: "Accept" and "Reject".
        :paramtype action: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.IpFilterActionType
        :keyword ip_mask: A string that contains the IP address range in CIDR notation for the rule.
         Required.
        :paramtype ip_mask: str
        """
        super().__init__(**kwargs)
        # All three fields are required, caller-supplied values.
        self.filter_name, self.action, self.ip_mask = filter_name, action, ip_mask
class JobResponse(_serialization.Model):
    """Properties of a Job Response object.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar job_id: The job identifier.
    :vartype job_id: str
    :ivar start_time_utc: The start time of the job.
    :vartype start_time_utc: ~datetime.datetime
    :ivar end_time_utc: The time the job stopped processing.
    :vartype end_time_utc: ~datetime.datetime
    :ivar type: The type of the job. Known values are: "unknown", "export", "import", "backup",
     "readDeviceProperties", "writeDeviceProperties", "updateDeviceConfiguration", "rebootDevice",
     "factoryResetDevice", and "firmwareUpdate".
    :vartype type: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.JobType
    :ivar status: The status of the job. Known values are: "unknown", "enqueued", "running",
     "completed", "failed", and "cancelled".
    :vartype status: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.JobStatus
    :ivar failure_reason: If status == failed, this string containing the reason for the failure.
    :vartype failure_reason: str
    :ivar status_message: The status message for the job.
    :vartype status_message: str
    :ivar parent_job_id: The job identifier of the parent job, if any.
    :vartype parent_job_id: str
    """

    _validation = {
        "job_id": {"readonly": True},
        "start_time_utc": {"readonly": True},
        "end_time_utc": {"readonly": True},
        "type": {"readonly": True},
        "status": {"readonly": True},
        "failure_reason": {"readonly": True},
        "status_message": {"readonly": True},
        "parent_job_id": {"readonly": True},
    }

    _attribute_map = {
        "job_id": {"key": "jobId", "type": "str"},
        "start_time_utc": {"key": "startTimeUtc", "type": "rfc-1123"},
        "end_time_utc": {"key": "endTimeUtc", "type": "rfc-1123"},
        "type": {"key": "type", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "failure_reason": {"key": "failureReason", "type": "str"},
        "status_message": {"key": "statusMessage", "type": "str"},
        "parent_job_id": {"key": "parentJobId", "type": "str"},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # Every field is read-only and populated by the service, so all start as None.
        for attr_name in (
            "job_id",
            "start_time_utc",
            "end_time_utc",
            "type",
            "status",
            "failure_reason",
            "status_message",
            "parent_job_id",
        ):
            setattr(self, attr_name, None)
class JobResponseListResult(_serialization.Model):
    """A page of JobResponse objects together with a continuation link.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The array of JobResponse objects.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.JobResponse]
    :ivar next_link: The next link.
    :vartype next_link: str
    """

    _validation = {"next_link": {"readonly": True}}

    _attribute_map = {
        "value": {"key": "value", "type": "[JobResponse]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.JobResponse"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: The array of JobResponse objects.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.JobResponse]
        """
        super().__init__(**kwargs)
        # ``next_link`` is read-only and only ever set by deserialization.
        self.value, self.next_link = value, None
class KeyVaultKeyProperties(_serialization.Model):
    """Describes a customer-managed key held in Azure Key Vault.

    :ivar key_identifier: The identifier of the key.
    :vartype key_identifier: str
    :ivar identity: Managed identity properties of KeyVault Key.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    """

    _attribute_map = {
        "key_identifier": {"key": "keyIdentifier", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
    }

    def __init__(
        self,
        *,
        key_identifier: Optional[str] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword key_identifier: The identifier of the key.
        :paramtype key_identifier: str
        :keyword identity: Managed identity properties of KeyVault Key.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        """
        super().__init__(**kwargs)
        self.key_identifier, self.identity = key_identifier, identity
class ManagedIdentity(_serialization.Model):
    """Reference to a user-assigned managed identity.

    :ivar user_assigned_identity: The user assigned identity.
    :vartype user_assigned_identity: str
    """

    _attribute_map = {"user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}}

    def __init__(self, *, user_assigned_identity: Optional[str] = None, **kwargs: Any) -> None:
        """
        :keyword user_assigned_identity: The user assigned identity.
        :paramtype user_assigned_identity: str
        """
        super().__init__(**kwargs)
        self.user_assigned_identity = user_assigned_identity
class MatchedRoute(_serialization.Model):
    """A single route that matched during route testing.

    :ivar properties: Properties of routes that matched.
    :vartype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteProperties
    """

    _attribute_map = {"properties": {"key": "properties", "type": "RouteProperties"}}

    def __init__(self, *, properties: Optional["_models.RouteProperties"] = None, **kwargs: Any) -> None:
        """
        :keyword properties: Properties of routes that matched.
        :paramtype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteProperties
        """
        super().__init__(**kwargs)
        self.properties = properties
class MessagingEndpointProperties(_serialization.Model):
    """Settings for the messaging endpoints used by this IoT hub (e.g. file upload
    notifications). See:
    https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload.

    :ivar lock_duration_as_iso8601: The lock duration.
    :vartype lock_duration_as_iso8601: ~datetime.timedelta
    :ivar ttl_as_iso8601: The period of time for which a message is available to consume before it
     is expired by the IoT hub.
    :vartype ttl_as_iso8601: ~datetime.timedelta
    :ivar max_delivery_count: The number of times the IoT hub attempts to deliver a message.
    :vartype max_delivery_count: int
    """

    # The service accepts delivery counts between 1 and 100 inclusive.
    _validation = {"max_delivery_count": {"maximum": 100, "minimum": 1}}

    _attribute_map = {
        "lock_duration_as_iso8601": {"key": "lockDurationAsIso8601", "type": "duration"},
        "ttl_as_iso8601": {"key": "ttlAsIso8601", "type": "duration"},
        "max_delivery_count": {"key": "maxDeliveryCount", "type": "int"},
    }

    def __init__(
        self,
        *,
        lock_duration_as_iso8601: Optional[datetime.timedelta] = None,
        ttl_as_iso8601: Optional[datetime.timedelta] = None,
        max_delivery_count: Optional[int] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword lock_duration_as_iso8601: The lock duration.
        :paramtype lock_duration_as_iso8601: ~datetime.timedelta
        :keyword ttl_as_iso8601: The period of time for which a message is available to consume
         before it is expired by the IoT hub.
        :paramtype ttl_as_iso8601: ~datetime.timedelta
        :keyword max_delivery_count: The number of times the IoT hub attempts to deliver a message.
        :paramtype max_delivery_count: int
        """
        super().__init__(**kwargs)
        self.lock_duration_as_iso8601 = lock_duration_as_iso8601
        self.ttl_as_iso8601 = ttl_as_iso8601
        self.max_delivery_count = max_delivery_count
class Name(_serialization.Model):
    """Display name of an IoT Hub type, with an optional localized form.

    :ivar value: IotHub type.
    :vartype value: str
    :ivar localized_value: Localized value of name.
    :vartype localized_value: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "str"},
        "localized_value": {"key": "localizedValue", "type": "str"},
    }

    def __init__(self, *, value: Optional[str] = None, localized_value: Optional[str] = None, **kwargs: Any) -> None:
        """
        :keyword value: IotHub type.
        :paramtype value: str
        :keyword localized_value: Localized value of name.
        :paramtype localized_value: str
        """
        super().__init__(**kwargs)
        self.value, self.localized_value = value, localized_value
class NetworkRuleSetIpRule(_serialization.Model):
    """A single IP rule belonging to a Network Rule Set.

    All required parameters must be populated in order to send to Azure.

    :ivar filter_name: Name of the IP filter rule. Required.
    :vartype filter_name: str
    :ivar action: IP Filter Action. "Allow"
    :vartype action: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.NetworkRuleIPAction
    :ivar ip_mask: A string that contains the IP address range in CIDR notation for the rule.
     Required.
    :vartype ip_mask: str
    """

    _validation = {
        "filter_name": {"required": True},
        "ip_mask": {"required": True},
    }

    _attribute_map = {
        "filter_name": {"key": "filterName", "type": "str"},
        "action": {"key": "action", "type": "str"},
        "ip_mask": {"key": "ipMask", "type": "str"},
    }

    def __init__(
        self,
        *,
        filter_name: str,
        ip_mask: str,
        action: Union[str, "_models.NetworkRuleIPAction"] = "Allow",
        **kwargs: Any
    ) -> None:
        """
        :keyword filter_name: Name of the IP filter rule. Required.
        :paramtype filter_name: str
        :keyword action: IP Filter Action. "Allow"
        :paramtype action: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.NetworkRuleIPAction
        :keyword ip_mask: A string that contains the IP address range in CIDR notation for the
         rule. Required.
        :paramtype ip_mask: str
        """
        super().__init__(**kwargs)
        # Note the keyword order differs from the attribute order on purpose:
        # required keywords come first in the signature.
        self.filter_name, self.action, self.ip_mask = filter_name, action, ip_mask
class NetworkRuleSetProperties(_serialization.Model):
    """Network Rule Set configuration of an IotHub.

    All required parameters must be populated in order to send to Azure.

    :ivar default_action: Default Action for Network Rule Set. Known values are: "Deny" and
     "Allow".
    :vartype default_action: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.DefaultAction
    :ivar apply_to_built_in_event_hub_endpoint: If True, then Network Rule Set is also applied to
     BuiltIn EventHub EndPoint of IotHub. Required.
    :vartype apply_to_built_in_event_hub_endpoint: bool
    :ivar ip_rules: List of IP Rules. Required.
    :vartype ip_rules: list[~azure.mgmt.iothub.v2022_04_30_preview.models.NetworkRuleSetIpRule]
    """

    _validation = {
        "apply_to_built_in_event_hub_endpoint": {"required": True},
        "ip_rules": {"required": True},
    }

    _attribute_map = {
        "default_action": {"key": "defaultAction", "type": "str"},
        "apply_to_built_in_event_hub_endpoint": {"key": "applyToBuiltInEventHubEndpoint", "type": "bool"},
        "ip_rules": {"key": "ipRules", "type": "[NetworkRuleSetIpRule]"},
    }

    def __init__(
        self,
        *,
        apply_to_built_in_event_hub_endpoint: bool,
        ip_rules: List["_models.NetworkRuleSetIpRule"],
        default_action: Union[str, "_models.DefaultAction"] = "Deny",
        **kwargs: Any
    ) -> None:
        """
        :keyword default_action: Default Action for Network Rule Set. Known values are: "Deny" and
         "Allow".
        :paramtype default_action: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.DefaultAction
        :keyword apply_to_built_in_event_hub_endpoint: If True, then Network Rule Set is also
         applied to BuiltIn EventHub EndPoint of IotHub. Required.
        :paramtype apply_to_built_in_event_hub_endpoint: bool
        :keyword ip_rules: List of IP Rules. Required.
        :paramtype ip_rules: list[~azure.mgmt.iothub.v2022_04_30_preview.models.NetworkRuleSetIpRule]
        """
        super().__init__(**kwargs)
        # The service-side default action is "Deny" unless overridden.
        self.default_action = default_action
        self.apply_to_built_in_event_hub_endpoint = apply_to_built_in_event_hub_endpoint
        self.ip_rules = ip_rules
class Operation(_serialization.Model):
    """A single IoT Hub REST API operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar name: Operation name: {provider}/{resource}/{read | write | action | delete}.
    :vartype name: str
    :ivar display: The object that represents the operation.
    :vartype display: ~azure.mgmt.iothub.v2022_04_30_preview.models.OperationDisplay
    """

    _validation = {"name": {"readonly": True}}

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "display": {"key": "display", "type": "OperationDisplay"},
    }

    def __init__(self, *, display: Optional["_models.OperationDisplay"] = None, **kwargs: Any) -> None:
        """
        :keyword display: The object that represents the operation.
        :paramtype display: ~azure.mgmt.iothub.v2022_04_30_preview.models.OperationDisplay
        """
        super().__init__(**kwargs)
        # ``name`` is server-assigned and therefore left unset here.
        self.name, self.display = None, display
class OperationDisplay(_serialization.Model):
    """Human-readable description of a REST API operation.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar provider: Service provider: Microsoft Devices.
    :vartype provider: str
    :ivar resource: Resource Type: IotHubs.
    :vartype resource: str
    :ivar operation: Name of the operation.
    :vartype operation: str
    :ivar description: Description of the operation.
    :vartype description: str
    """

    # All display fields come from the service.
    _validation = {
        name: {"readonly": True} for name in ("provider", "resource", "operation", "description")
    }

    _attribute_map = {
        "provider": {"key": "provider", "type": "str"},
        "resource": {"key": "resource", "type": "str"},
        "operation": {"key": "operation", "type": "str"},
        "description": {"key": "description", "type": "str"},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        self.provider = self.resource = None
        self.operation = self.description = None
class OperationInputs(_serialization.Model):
    """Input payload for name-availability checks.

    All required parameters must be populated in order to send to Azure.

    :ivar name: The name of the IoT hub to check. Required.
    :vartype name: str
    """

    _validation = {"name": {"required": True}}

    _attribute_map = {"name": {"key": "name", "type": "str"}}

    def __init__(self, *, name: str, **kwargs: Any) -> None:
        """
        :keyword name: The name of the IoT hub to check. Required.
        :paramtype name: str
        """
        super().__init__(**kwargs)
        self.name = name
class OperationListResult(_serialization.Model):
    """Result of the request to list IoT Hub operations. It contains a list of operations and a URL
    link to get the next set of results.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: List of IoT Hub operations supported by the Microsoft.Devices resource provider.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.Operation]
    :ivar next_link: URL to get the next set of operation list results if there are any.
    :vartype next_link: str
    """

    _validation = {
        "value": {"readonly": True},
        "next_link": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "[Operation]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # Both members are server-populated paging fields.
        self.value = self.next_link = None
class PrivateEndpoint(_serialization.Model):
    """The private endpoint attached to a private endpoint connection.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: The resource identifier.
    :vartype id: str
    """

    _validation = {"id": {"readonly": True}}

    _attribute_map = {"id": {"key": "id", "type": "str"}}

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        # ARM resource id, assigned by the service.
        self.id = None
class PrivateEndpointConnection(_serialization.Model):
    """A private endpoint connection of an IotHub.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: The resource identifier.
    :vartype id: str
    :ivar name: The resource name.
    :vartype name: str
    :ivar type: The resource type.
    :vartype type: str
    :ivar properties: The properties of a private endpoint connection. Required.
    :vartype properties:
     ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateEndpointConnectionProperties
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "properties": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "properties": {"key": "properties", "type": "PrivateEndpointConnectionProperties"},
    }

    def __init__(self, *, properties: "_models.PrivateEndpointConnectionProperties", **kwargs: Any) -> None:
        """
        :keyword properties: The properties of a private endpoint connection. Required.
        :paramtype properties:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateEndpointConnectionProperties
        """
        super().__init__(**kwargs)
        # id/name/type are ARM envelope fields filled in by the service.
        self.id = self.name = self.type = None
        self.properties = properties
class PrivateEndpointConnectionProperties(_serialization.Model):
    """State and endpoint details of a private endpoint connection.

    All required parameters must be populated in order to send to Azure.

    :ivar private_endpoint: The private endpoint property of a private endpoint connection.
    :vartype private_endpoint: ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateEndpoint
    :ivar private_link_service_connection_state: The current state of a private endpoint
     connection. Required.
    :vartype private_link_service_connection_state:
     ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateLinkServiceConnectionState
    """

    _validation = {"private_link_service_connection_state": {"required": True}}

    _attribute_map = {
        "private_endpoint": {"key": "privateEndpoint", "type": "PrivateEndpoint"},
        "private_link_service_connection_state": {
            "key": "privateLinkServiceConnectionState",
            "type": "PrivateLinkServiceConnectionState",
        },
    }

    def __init__(
        self,
        *,
        private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState",
        private_endpoint: Optional["_models.PrivateEndpoint"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword private_endpoint: The private endpoint property of a private endpoint connection.
        :paramtype private_endpoint: ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateEndpoint
        :keyword private_link_service_connection_state: The current state of a private endpoint
         connection. Required.
        :paramtype private_link_service_connection_state:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateLinkServiceConnectionState
        """
        super().__init__(**kwargs)
        self.private_endpoint = private_endpoint
        self.private_link_service_connection_state = private_link_service_connection_state
class PrivateLinkResources(_serialization.Model):
    """Collection of private link resources available for an IotHub.

    :ivar value: The list of available private link resources for an IotHub.
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.GroupIdInformation]
    """

    _attribute_map = {"value": {"key": "value", "type": "[GroupIdInformation]"}}

    def __init__(self, *, value: Optional[List["_models.GroupIdInformation"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: The list of available private link resources for an IotHub.
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.GroupIdInformation]
        """
        super().__init__(**kwargs)
        self.value = value
class PrivateLinkServiceConnectionState(_serialization.Model):
    """Current approval state of a private endpoint connection.

    All required parameters must be populated in order to send to Azure.

    :ivar status: The status of a private endpoint connection. Required. Known values are:
     "Pending", "Approved", "Rejected", and "Disconnected".
    :vartype status: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateLinkServiceConnectionStatus
    :ivar description: The description for the current state of a private endpoint connection.
     Required.
    :vartype description: str
    :ivar actions_required: Actions required for a private endpoint connection.
    :vartype actions_required: str
    """

    _validation = {
        "status": {"required": True},
        "description": {"required": True},
    }

    _attribute_map = {
        "status": {"key": "status", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "actions_required": {"key": "actionsRequired", "type": "str"},
    }

    def __init__(
        self,
        *,
        status: Union[str, "_models.PrivateLinkServiceConnectionStatus"],
        description: str,
        actions_required: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword status: The status of a private endpoint connection. Required. Known values are:
         "Pending", "Approved", "Rejected", and "Disconnected".
        :paramtype status: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.PrivateLinkServiceConnectionStatus
        :keyword description: The description for the current state of a private endpoint
         connection. Required.
        :paramtype description: str
        :keyword actions_required: Actions required for a private endpoint connection.
        :paramtype actions_required: str
        """
        super().__init__(**kwargs)
        self.status, self.description = status, description
        self.actions_required = actions_required
class RegistryStatistics(_serialization.Model):
    """Device counts reported by the identity registry.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar total_device_count: The total count of devices in the identity registry.
    :vartype total_device_count: int
    :ivar enabled_device_count: The count of enabled devices in the identity registry.
    :vartype enabled_device_count: int
    :ivar disabled_device_count: The count of disabled devices in the identity registry.
    :vartype disabled_device_count: int
    """

    # All counters are computed server-side.
    _validation = {
        name: {"readonly": True}
        for name in ("total_device_count", "enabled_device_count", "disabled_device_count")
    }

    _attribute_map = {
        "total_device_count": {"key": "totalDeviceCount", "type": "int"},
        "enabled_device_count": {"key": "enabledDeviceCount", "type": "int"},
        "disabled_device_count": {"key": "disabledDeviceCount", "type": "int"},
    }

    def __init__(self, **kwargs: Any) -> None:
        """ """
        super().__init__(**kwargs)
        self.total_device_count = None
        self.enabled_device_count = self.disabled_device_count = None
class RootCertificateProperties(_serialization.Model):
    """Root certificate selection for the hub.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar enable_root_certificate_v2: This property when set to true, hub will use G2 cert; while
     it's set to false, hub uses Baltimore Cert.
    :vartype enable_root_certificate_v2: bool
    :ivar last_updated_time_utc: the last update time to root certificate flag.
    :vartype last_updated_time_utc: ~datetime.datetime
    """

    _validation = {"last_updated_time_utc": {"readonly": True}}

    _attribute_map = {
        "enable_root_certificate_v2": {"key": "enableRootCertificateV2", "type": "bool"},
        "last_updated_time_utc": {"key": "lastUpdatedTimeUtc", "type": "iso-8601"},
    }

    def __init__(self, *, enable_root_certificate_v2: Optional[bool] = None, **kwargs: Any) -> None:
        """
        :keyword enable_root_certificate_v2: This property when set to true, hub will use G2 cert;
         while it's set to false, hub uses Baltimore Cert.
        :paramtype enable_root_certificate_v2: bool
        """
        super().__init__(**kwargs)
        # The timestamp is read-only and supplied by the service.
        self.enable_root_certificate_v2, self.last_updated_time_utc = enable_root_certificate_v2, None
class RouteCompilationError(_serialization.Model):
    """An error produced while compiling (evaluating) a route.

    :ivar message: Route error message.
    :vartype message: str
    :ivar severity: Severity of the route error. Known values are: "error" and "warning".
    :vartype severity: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorSeverity
    :ivar location: Location where the route error happened.
    :vartype location: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorRange
    """

    _attribute_map = {
        "message": {"key": "message", "type": "str"},
        "severity": {"key": "severity", "type": "str"},
        "location": {"key": "location", "type": "RouteErrorRange"},
    }

    def __init__(
        self,
        *,
        message: Optional[str] = None,
        severity: Optional[Union[str, "_models.RouteErrorSeverity"]] = None,
        location: Optional["_models.RouteErrorRange"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword message: Route error message.
        :paramtype message: str
        :keyword severity: Severity of the route error. Known values are: "error" and "warning".
        :paramtype severity: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorSeverity
        :keyword location: Location where the route error happened.
        :paramtype location: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorRange
        """
        super().__init__(**kwargs)
        self.message, self.severity, self.location = message, severity, location
class RouteErrorPosition(_serialization.Model):
    """Line/column position at which a route error occurred.

    :ivar line: Line where the route error happened.
    :vartype line: int
    :ivar column: Column where the route error happened.
    :vartype column: int
    """

    _attribute_map = {
        "line": {"key": "line", "type": "int"},
        "column": {"key": "column", "type": "int"},
    }

    def __init__(self, *, line: Optional[int] = None, column: Optional[int] = None, **kwargs: Any) -> None:
        """
        :keyword line: Line where the route error happened.
        :paramtype line: int
        :keyword column: Column where the route error happened.
        :paramtype column: int
        """
        super().__init__(**kwargs)
        self.line, self.column = line, column
class RouteErrorRange(_serialization.Model):
    """Start/end span of a route error.

    :ivar start: Start where the route error happened.
    :vartype start: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorPosition
    :ivar end: End where the route error happened.
    :vartype end: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorPosition
    """

    _attribute_map = {
        "start": {"key": "start", "type": "RouteErrorPosition"},
        "end": {"key": "end", "type": "RouteErrorPosition"},
    }

    def __init__(
        self,
        *,
        start: Optional["_models.RouteErrorPosition"] = None,
        end: Optional["_models.RouteErrorPosition"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword start: Start where the route error happened.
        :paramtype start: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorPosition
        :keyword end: End where the route error happened.
        :paramtype end: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteErrorPosition
        """
        super().__init__(**kwargs)
        self.start, self.end = start, end
class RouteProperties(_serialization.Model):
    """A routing rule that the IoT hub uses to route messages to endpoints.

    All required parameters must be populated in order to send to Azure.

    :ivar name: The name of the route. The name can only include alphanumeric characters, periods,
     underscores, hyphens, has a maximum length of 64 characters, and must be unique. Required.
    :vartype name: str
    :ivar source: The source that the routing rule is to be applied to, such as DeviceMessages.
     Required. Known values are: "Invalid", "DeviceMessages", "TwinChangeEvents",
     "DeviceLifecycleEvents", "DeviceJobLifecycleEvents", "DigitalTwinChangeEvents",
     "DeviceConnectionStateEvents", and "MqttBrokerMessages".
    :vartype source: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingSource
    :ivar condition: The condition that is evaluated to apply the routing rule. If no condition is
     provided, it evaluates to true by default. For grammar, see:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language.
    :vartype condition: str
    :ivar endpoint_names: The list of endpoints to which messages that satisfy the condition are
     routed. Currently only one endpoint is allowed. Required.
    :vartype endpoint_names: list[str]
    :ivar is_enabled: Used to specify whether a route is enabled. Required.
    :vartype is_enabled: bool
    """

    _validation = {
        # Route names are restricted to 1-64 characters from [A-Za-z0-9-._].
        "name": {"required": True, "pattern": r"^[A-Za-z0-9-._]{1,64}$"},
        "source": {"required": True},
        # The service currently supports exactly one endpoint per route.
        "endpoint_names": {"required": True, "max_items": 1, "min_items": 1},
        "is_enabled": {"required": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "source": {"key": "source", "type": "str"},
        "condition": {"key": "condition", "type": "str"},
        "endpoint_names": {"key": "endpointNames", "type": "[str]"},
        "is_enabled": {"key": "isEnabled", "type": "bool"},
    }

    def __init__(
        self,
        *,
        name: str,
        source: Union[str, "_models.RoutingSource"],
        endpoint_names: List[str],
        is_enabled: bool,
        condition: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword name: The name of the route. Alphanumerics, periods, underscores and hyphens
         only; at most 64 characters; must be unique. Required.
        :paramtype name: str
        :keyword source: The source that the routing rule is to be applied to, such as
         DeviceMessages. Required. Known values are: "Invalid", "DeviceMessages",
         "TwinChangeEvents", "DeviceLifecycleEvents", "DeviceJobLifecycleEvents",
         "DigitalTwinChangeEvents", "DeviceConnectionStateEvents", and "MqttBrokerMessages".
        :paramtype source: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingSource
        :keyword condition: The condition that is evaluated to apply the routing rule. If omitted,
         it evaluates to true by default. For grammar, see:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language.
        :paramtype condition: str
        :keyword endpoint_names: The list of endpoints to which messages that satisfy the
         condition are routed. Currently only one endpoint is allowed. Required.
        :paramtype endpoint_names: list[str]
        :keyword is_enabled: Used to specify whether a route is enabled. Required.
        :paramtype is_enabled: bool
        """
        super().__init__(**kwargs)
        self.name, self.source, self.condition = name, source, condition
        self.endpoint_names, self.is_enabled = endpoint_names, is_enabled
class RoutingCosmosDBSqlApiProperties(_serialization.Model):  # pylint: disable=too-many-instance-attributes
    """Configuration of a Cosmos DB SQL collection routing endpoint.

    All required parameters must be populated in order to send to Azure.

    :ivar name: The name that identifies this endpoint. The name can only include alphanumeric
     characters, periods, underscores, hyphens and has a maximum length of 64 characters. The
     following names are reserved: events, fileNotifications, $default. Endpoint names must be
     unique across endpoint types. Required.
    :vartype name: str
    :ivar id: Id of the cosmos DB sql collection endpoint.
    :vartype id: str
    :ivar subscription_id: The subscription identifier of the cosmos DB account.
    :vartype subscription_id: str
    :ivar resource_group: The name of the resource group of the cosmos DB account.
    :vartype resource_group: str
    :ivar endpoint_uri: The url of the cosmos DB account. It must include the protocol https://.
     Required.
    :vartype endpoint_uri: str
    :ivar authentication_type: Method used to authenticate against the cosmos DB sql collection
     endpoint. Known values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of routing cosmos DB collection endpoint.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    :ivar primary_key: The primary key of the cosmos DB account.
    :vartype primary_key: str
    :ivar secondary_key: The secondary key of the cosmos DB account.
    :vartype secondary_key: str
    :ivar database_name: The name of the cosmos DB database in the cosmos DB account. Required.
    :vartype database_name: str
    :ivar collection_name: The name of the cosmos DB sql collection in the cosmos DB database.
     Required.
    :vartype collection_name: str
    :ivar partition_key_name: The name of the partition key associated with this cosmos DB sql
     collection if one exists. This is an optional parameter.
    :vartype partition_key_name: str
    :ivar partition_key_template: The template for generating a synthetic partition key value for
     use with this cosmos DB sql collection. The template must include at least one of the
     following placeholders: {iothub}, {deviceid}, {DD}, {MM}, and {YYYY}. Any one placeholder may
     be specified at most once, but order and non-placeholder components are arbitrary. This
     parameter is only required if PartitionKeyName is specified.
    :vartype partition_key_template: str
    """

    _validation = {
        # Endpoint names are restricted to 1-64 characters from [A-Za-z0-9-._].
        "name": {"required": True, "pattern": r"^[A-Za-z0-9-._]{1,64}$"},
        "endpoint_uri": {"required": True},
        "database_name": {"required": True},
        "collection_name": {"required": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "endpoint_uri": {"key": "endpointUri", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
        "primary_key": {"key": "primaryKey", "type": "str"},
        "secondary_key": {"key": "secondaryKey", "type": "str"},
        "database_name": {"key": "databaseName", "type": "str"},
        "collection_name": {"key": "collectionName", "type": "str"},
        "partition_key_name": {"key": "partitionKeyName", "type": "str"},
        "partition_key_template": {"key": "partitionKeyTemplate", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: str,
        endpoint_uri: str,
        database_name: str,
        collection_name: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        subscription_id: Optional[str] = None,
        resource_group: Optional[str] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        primary_key: Optional[str] = None,
        secondary_key: Optional[str] = None,
        partition_key_name: Optional[str] = None,
        partition_key_template: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword name: The name that identifies this endpoint. Alphanumerics, periods, underscores
         and hyphens only; at most 64 characters. The following names are reserved: events,
         fileNotifications, $default. Endpoint names must be unique across endpoint types.
         Required.
        :paramtype name: str
        :keyword id: Id of the cosmos DB sql collection endpoint.
        :paramtype id: str
        :keyword subscription_id: The subscription identifier of the cosmos DB account.
        :paramtype subscription_id: str
        :keyword resource_group: The name of the resource group of the cosmos DB account.
        :paramtype resource_group: str
        :keyword endpoint_uri: The url of the cosmos DB account. It must include the protocol
         https://. Required.
        :paramtype endpoint_uri: str
        :keyword authentication_type: Method used to authenticate against the cosmos DB sql
         collection endpoint. Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of routing cosmos DB collection endpoint.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        :keyword primary_key: The primary key of the cosmos DB account.
        :paramtype primary_key: str
        :keyword secondary_key: The secondary key of the cosmos DB account.
        :paramtype secondary_key: str
        :keyword database_name: The name of the cosmos DB database in the cosmos DB account.
         Required.
        :paramtype database_name: str
        :keyword collection_name: The name of the cosmos DB sql collection in the cosmos DB
         database. Required.
        :paramtype collection_name: str
        :keyword partition_key_name: The name of the partition key associated with this cosmos DB
         sql collection if one exists. This is an optional parameter.
        :paramtype partition_key_name: str
        :keyword partition_key_template: The template for generating a synthetic partition key
         value for use with this cosmos DB sql collection. The template must include at least one
         of the following placeholders: {iothub}, {deviceid}, {DD}, {MM}, and {YYYY}. Any one
         placeholder may be specified at most once, but order and non-placeholder components are
         arbitrary. This parameter is only required if PartitionKeyName is specified.
        :paramtype partition_key_template: str
        """
        super().__init__(**kwargs)
        # Endpoint identity.
        self.name = name
        self.id = id
        self.subscription_id = subscription_id
        self.resource_group = resource_group
        # Connection and credentials.
        self.endpoint_uri = endpoint_uri
        self.authentication_type = authentication_type
        self.identity = identity
        self.primary_key = primary_key
        self.secondary_key = secondary_key
        # Target database/collection and partitioning.
        self.database_name = database_name
        self.collection_name = collection_name
        self.partition_key_name = partition_key_name
        self.partition_key_template = partition_key_template
class RoutingEndpoints(_serialization.Model):
    """The custom endpoints that an IoT hub can route messages to, grouped by endpoint type.

    Across all endpoint types, paid hubs allow a maximum of 10 custom endpoints in total,
    while free hubs allow only 1.

    :ivar service_bus_queues: Service Bus queue endpoints that the IoT hub routes messages to,
     based on the routing rules.
    :vartype service_bus_queues:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingServiceBusQueueEndpointProperties]
    :ivar service_bus_topics: Service Bus topic endpoints that the IoT hub routes messages to,
     based on the routing rules.
    :vartype service_bus_topics:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingServiceBusTopicEndpointProperties]
    :ivar event_hubs: Event Hubs endpoints that the IoT hub routes messages to, based on the
     routing rules. The built-in Event Hubs endpoint is not part of this list.
    :vartype event_hubs:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingEventHubProperties]
    :ivar storage_containers: Storage container endpoints that the IoT hub routes messages to,
     based on the routing rules.
    :vartype storage_containers:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingStorageContainerProperties]
    :ivar cosmos_db_sql_collections: Cosmos DB collection endpoints that the IoT hub routes
     messages to, based on the routing rules.
    :vartype cosmos_db_sql_collections:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingCosmosDBSqlApiProperties]
    """

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "service_bus_queues": {"key": "serviceBusQueues", "type": "[RoutingServiceBusQueueEndpointProperties]"},
        "service_bus_topics": {"key": "serviceBusTopics", "type": "[RoutingServiceBusTopicEndpointProperties]"},
        "event_hubs": {"key": "eventHubs", "type": "[RoutingEventHubProperties]"},
        "storage_containers": {"key": "storageContainers", "type": "[RoutingStorageContainerProperties]"},
        "cosmos_db_sql_collections": {"key": "cosmosDBSqlCollections", "type": "[RoutingCosmosDBSqlApiProperties]"},
    }

    def __init__(
        self,
        *,
        service_bus_queues: Optional[List["_models.RoutingServiceBusQueueEndpointProperties"]] = None,
        service_bus_topics: Optional[List["_models.RoutingServiceBusTopicEndpointProperties"]] = None,
        event_hubs: Optional[List["_models.RoutingEventHubProperties"]] = None,
        storage_containers: Optional[List["_models.RoutingStorageContainerProperties"]] = None,
        cosmos_db_sql_collections: Optional[List["_models.RoutingCosmosDBSqlApiProperties"]] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword service_bus_queues: Service Bus queue endpoints that the IoT hub routes messages
         to, based on the routing rules.
        :paramtype service_bus_queues:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingServiceBusQueueEndpointProperties]
        :keyword service_bus_topics: Service Bus topic endpoints that the IoT hub routes messages
         to, based on the routing rules.
        :paramtype service_bus_topics:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingServiceBusTopicEndpointProperties]
        :keyword event_hubs: Event Hubs endpoints that the IoT hub routes messages to, based on the
         routing rules. The built-in Event Hubs endpoint is not part of this list.
        :paramtype event_hubs:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingEventHubProperties]
        :keyword storage_containers: Storage container endpoints that the IoT hub routes messages
         to, based on the routing rules.
        :paramtype storage_containers:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingStorageContainerProperties]
        :keyword cosmos_db_sql_collections: Cosmos DB collection endpoints that the IoT hub routes
         messages to, based on the routing rules.
        :paramtype cosmos_db_sql_collections:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingCosmosDBSqlApiProperties]
        """
        super().__init__(**kwargs)
        # Each keyword argument maps one-to-one onto an instance attribute; None means
        # "no endpoints of this type configured".
        for attr, value in (
            ("service_bus_queues", service_bus_queues),
            ("service_bus_topics", service_bus_topics),
            ("event_hubs", event_hubs),
            ("storage_containers", storage_containers),
            ("cosmos_db_sql_collections", cosmos_db_sql_collections),
        ):
            setattr(self, attr, value)
class RoutingEventHubProperties(_serialization.Model):
    """The properties of an event hub routing endpoint.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Id of the event hub endpoint.
    :vartype id: str
    :ivar connection_string: The connection string of the event hub endpoint.
    :vartype connection_string: str
    :ivar endpoint_uri: The url of the event hub endpoint. It must include the protocol sb://.
    :vartype endpoint_uri: str
    :ivar entity_path: Event hub name on the event hub namespace.
    :vartype entity_path: str
    :ivar authentication_type: Method used to authenticate against the event hub endpoint. Known
     values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of routing event hub endpoint.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    :ivar name: The name that identifies this endpoint. Only alphanumeric characters, periods,
     underscores and hyphens are allowed, up to 64 characters. The names events,
     fileNotifications and $default are reserved. Endpoint names must be unique across endpoint
     types. Required.
    :vartype name: str
    :ivar subscription_id: The subscription identifier of the event hub endpoint.
    :vartype subscription_id: str
    :ivar resource_group: The name of the resource group of the event hub endpoint.
    :vartype resource_group: str
    """

    # Server-side constraints enforced client-side before the request is sent.
    _validation = {
        "name": {"required": True, "pattern": r"^[A-Za-z0-9-._]{1,64}$"},
    }

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "connection_string": {"key": "connectionString", "type": "str"},
        "endpoint_uri": {"key": "endpointUri", "type": "str"},
        "entity_path": {"key": "entityPath", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
        "name": {"key": "name", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        connection_string: Optional[str] = None,
        endpoint_uri: Optional[str] = None,
        entity_path: Optional[str] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        subscription_id: Optional[str] = None,
        resource_group: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword id: Id of the event hub endpoint.
        :paramtype id: str
        :keyword connection_string: The connection string of the event hub endpoint.
        :paramtype connection_string: str
        :keyword endpoint_uri: The url of the event hub endpoint. It must include the protocol
         sb://.
        :paramtype endpoint_uri: str
        :keyword entity_path: Event hub name on the event hub namespace.
        :paramtype entity_path: str
        :keyword authentication_type: Method used to authenticate against the event hub endpoint.
         Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of routing event hub endpoint.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        :keyword name: The name that identifies this endpoint. Only alphanumeric characters,
         periods, underscores and hyphens are allowed, up to 64 characters. The names events,
         fileNotifications and $default are reserved. Endpoint names must be unique across
         endpoint types. Required.
        :paramtype name: str
        :keyword subscription_id: The subscription identifier of the event hub endpoint.
        :paramtype subscription_id: str
        :keyword resource_group: The name of the resource group of the event hub endpoint.
        :paramtype resource_group: str
        """
        super().__init__(**kwargs)
        # Bulk-assign: every constructor argument maps one-to-one onto an instance attribute.
        for attr, value in (
            ("id", id),
            ("connection_string", connection_string),
            ("endpoint_uri", endpoint_uri),
            ("entity_path", entity_path),
            ("authentication_type", authentication_type),
            ("identity", identity),
            ("name", name),
            ("subscription_id", subscription_id),
            ("resource_group", resource_group),
        ):
            setattr(self, attr, value)
class RoutingMessage(_serialization.Model):
    """A message used as input when testing routing rules.

    :ivar body: Body of routing message.
    :vartype body: str
    :ivar app_properties: App properties.
    :vartype app_properties: dict[str, str]
    :ivar system_properties: System properties.
    :vartype system_properties: dict[str, str]
    """

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "body": {"key": "body", "type": "str"},
        "app_properties": {"key": "appProperties", "type": "{str}"},
        "system_properties": {"key": "systemProperties", "type": "{str}"},
    }

    def __init__(
        self,
        *,
        body: Optional[str] = None,
        app_properties: Optional[Dict[str, str]] = None,
        system_properties: Optional[Dict[str, str]] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword body: Body of routing message.
        :paramtype body: str
        :keyword app_properties: App properties.
        :paramtype app_properties: dict[str, str]
        :keyword system_properties: System properties.
        :paramtype system_properties: dict[str, str]
        """
        super().__init__(**kwargs)
        # Straight pass-through of the keyword arguments onto the instance.
        self.system_properties = system_properties
        self.app_properties = app_properties
        self.body = body
class RoutingProperties(_serialization.Model):
    """The routing-related properties of the IoT hub. See:
    https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging.

    :ivar endpoints: The custom endpoints to which the IoT hub routes messages based on the
     routing rules. Paid hubs allow a maximum of 10 custom endpoints across all endpoint types;
     free hubs allow only 1.
    :vartype endpoints: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingEndpoints
    :ivar routes: User-provided routing rules the IoT hub uses to route messages to built-in and
     custom endpoints. Paid hubs allow a maximum of 100 routing rules; free hubs allow at most 5.
    :vartype routes: list[~azure.mgmt.iothub.v2022_04_30_preview.models.RouteProperties]
    :ivar fallback_route: The route used as a fall-back when none of the conditions in the
     'routes' section are met. Optional; when unset, messages that match no route are delivered
     to the built-in eventhub endpoint.
    :vartype fallback_route: ~azure.mgmt.iothub.v2022_04_30_preview.models.FallbackRouteProperties
    :ivar enrichments: User-provided enrichments the IoT hub applies to messages delivered to
     built-in and custom endpoints. See: https://aka.ms/telemetryoneventgrid.
    :vartype enrichments: list[~azure.mgmt.iothub.v2022_04_30_preview.models.EnrichmentProperties]
    """

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "endpoints": {"key": "endpoints", "type": "RoutingEndpoints"},
        "routes": {"key": "routes", "type": "[RouteProperties]"},
        "fallback_route": {"key": "fallbackRoute", "type": "FallbackRouteProperties"},
        "enrichments": {"key": "enrichments", "type": "[EnrichmentProperties]"},
    }

    def __init__(
        self,
        *,
        endpoints: Optional["_models.RoutingEndpoints"] = None,
        routes: Optional[List["_models.RouteProperties"]] = None,
        fallback_route: Optional["_models.FallbackRouteProperties"] = None,
        enrichments: Optional[List["_models.EnrichmentProperties"]] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword endpoints: The custom endpoints to which the IoT hub routes messages based on the
         routing rules. Paid hubs allow a maximum of 10 custom endpoints across all endpoint
         types; free hubs allow only 1.
        :paramtype endpoints: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingEndpoints
        :keyword routes: User-provided routing rules the IoT hub uses to route messages to
         built-in and custom endpoints. Paid hubs allow a maximum of 100 routing rules; free hubs
         allow at most 5.
        :paramtype routes: list[~azure.mgmt.iothub.v2022_04_30_preview.models.RouteProperties]
        :keyword fallback_route: The route used as a fall-back when none of the conditions in the
         'routes' section are met. Optional; when unset, messages that match no route are
         delivered to the built-in eventhub endpoint.
        :paramtype fallback_route:
         ~azure.mgmt.iothub.v2022_04_30_preview.models.FallbackRouteProperties
        :keyword enrichments: User-provided enrichments the IoT hub applies to messages delivered
         to built-in and custom endpoints. See: https://aka.ms/telemetryoneventgrid.
        :paramtype enrichments:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.EnrichmentProperties]
        """
        super().__init__(**kwargs)
        # Each keyword argument maps one-to-one onto an instance attribute.
        for attr, value in (
            ("endpoints", endpoints),
            ("routes", routes),
            ("fallback_route", fallback_route),
            ("enrichments", enrichments),
        ):
            setattr(self, attr, value)
class RoutingServiceBusQueueEndpointProperties(_serialization.Model):
    """The properties of a Service Bus queue routing endpoint.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Id of the service bus queue endpoint.
    :vartype id: str
    :ivar connection_string: The connection string of the service bus queue endpoint.
    :vartype connection_string: str
    :ivar endpoint_uri: The url of the service bus queue endpoint. It must include the protocol
     sb://.
    :vartype endpoint_uri: str
    :ivar entity_path: Queue name on the service bus namespace.
    :vartype entity_path: str
    :ivar authentication_type: Method used to authenticate against the service bus queue
     endpoint. Known values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of routing service bus queue endpoint.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    :ivar name: The name that identifies this endpoint. Only alphanumeric characters, periods,
     underscores and hyphens are allowed, up to 64 characters. The names events,
     fileNotifications and $default are reserved. Endpoint names must be unique across endpoint
     types. The name need not match the actual queue name. Required.
    :vartype name: str
    :ivar subscription_id: The subscription identifier of the service bus queue endpoint.
    :vartype subscription_id: str
    :ivar resource_group: The name of the resource group of the service bus queue endpoint.
    :vartype resource_group: str
    """

    # Server-side constraints enforced client-side before the request is sent.
    _validation = {
        "name": {"required": True, "pattern": r"^[A-Za-z0-9-._]{1,64}$"},
    }

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "connection_string": {"key": "connectionString", "type": "str"},
        "endpoint_uri": {"key": "endpointUri", "type": "str"},
        "entity_path": {"key": "entityPath", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
        "name": {"key": "name", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        connection_string: Optional[str] = None,
        endpoint_uri: Optional[str] = None,
        entity_path: Optional[str] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        subscription_id: Optional[str] = None,
        resource_group: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword id: Id of the service bus queue endpoint.
        :paramtype id: str
        :keyword connection_string: The connection string of the service bus queue endpoint.
        :paramtype connection_string: str
        :keyword endpoint_uri: The url of the service bus queue endpoint. It must include the
         protocol sb://.
        :paramtype endpoint_uri: str
        :keyword entity_path: Queue name on the service bus namespace.
        :paramtype entity_path: str
        :keyword authentication_type: Method used to authenticate against the service bus queue
         endpoint. Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of routing service bus queue endpoint.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        :keyword name: The name that identifies this endpoint. Only alphanumeric characters,
         periods, underscores and hyphens are allowed, up to 64 characters. The names events,
         fileNotifications and $default are reserved. Endpoint names must be unique across
         endpoint types. The name need not match the actual queue name. Required.
        :paramtype name: str
        :keyword subscription_id: The subscription identifier of the service bus queue endpoint.
        :paramtype subscription_id: str
        :keyword resource_group: The name of the resource group of the service bus queue endpoint.
        :paramtype resource_group: str
        """
        super().__init__(**kwargs)
        # Bulk-assign: every constructor argument maps one-to-one onto an instance attribute.
        for attr, value in (
            ("id", id),
            ("connection_string", connection_string),
            ("endpoint_uri", endpoint_uri),
            ("entity_path", entity_path),
            ("authentication_type", authentication_type),
            ("identity", identity),
            ("name", name),
            ("subscription_id", subscription_id),
            ("resource_group", resource_group),
        ):
            setattr(self, attr, value)
class RoutingServiceBusTopicEndpointProperties(_serialization.Model):
    """The properties of a Service Bus topic routing endpoint.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Id of the service bus topic endpoint.
    :vartype id: str
    :ivar connection_string: The connection string of the service bus topic endpoint.
    :vartype connection_string: str
    :ivar endpoint_uri: The url of the service bus topic endpoint. It must include the protocol
     sb://.
    :vartype endpoint_uri: str
    :ivar entity_path: Queue name on the service bus topic.
    :vartype entity_path: str
    :ivar authentication_type: Method used to authenticate against the service bus topic
     endpoint. Known values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of routing service bus topic endpoint.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    :ivar name: The name that identifies this endpoint. Only alphanumeric characters, periods,
     underscores and hyphens are allowed, up to 64 characters. The names events,
     fileNotifications and $default are reserved. Endpoint names must be unique across endpoint
     types. The name need not match the actual topic name. Required.
    :vartype name: str
    :ivar subscription_id: The subscription identifier of the service bus topic endpoint.
    :vartype subscription_id: str
    :ivar resource_group: The name of the resource group of the service bus topic endpoint.
    :vartype resource_group: str
    """

    # Server-side constraints enforced client-side before the request is sent.
    _validation = {
        "name": {"required": True, "pattern": r"^[A-Za-z0-9-._]{1,64}$"},
    }

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "connection_string": {"key": "connectionString", "type": "str"},
        "endpoint_uri": {"key": "endpointUri", "type": "str"},
        "entity_path": {"key": "entityPath", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
        "name": {"key": "name", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        connection_string: Optional[str] = None,
        endpoint_uri: Optional[str] = None,
        entity_path: Optional[str] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        subscription_id: Optional[str] = None,
        resource_group: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword id: Id of the service bus topic endpoint.
        :paramtype id: str
        :keyword connection_string: The connection string of the service bus topic endpoint.
        :paramtype connection_string: str
        :keyword endpoint_uri: The url of the service bus topic endpoint. It must include the
         protocol sb://.
        :paramtype endpoint_uri: str
        :keyword entity_path: Queue name on the service bus topic.
        :paramtype entity_path: str
        :keyword authentication_type: Method used to authenticate against the service bus topic
         endpoint. Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of routing service bus topic endpoint.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        :keyword name: The name that identifies this endpoint. Only alphanumeric characters,
         periods, underscores and hyphens are allowed, up to 64 characters. The names events,
         fileNotifications and $default are reserved. Endpoint names must be unique across
         endpoint types. The name need not match the actual topic name. Required.
        :paramtype name: str
        :keyword subscription_id: The subscription identifier of the service bus topic endpoint.
        :paramtype subscription_id: str
        :keyword resource_group: The name of the resource group of the service bus topic endpoint.
        :paramtype resource_group: str
        """
        super().__init__(**kwargs)
        # Bulk-assign: every constructor argument maps one-to-one onto an instance attribute.
        for attr, value in (
            ("id", id),
            ("connection_string", connection_string),
            ("endpoint_uri", endpoint_uri),
            ("entity_path", entity_path),
            ("authentication_type", authentication_type),
            ("identity", identity),
            ("name", name),
            ("subscription_id", subscription_id),
            ("resource_group", resource_group),
        ):
            setattr(self, attr, value)
class RoutingStorageContainerProperties(_serialization.Model):  # pylint: disable=too-many-instance-attributes
    """The properties of a storage container routing endpoint.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Id of the storage container endpoint.
    :vartype id: str
    :ivar connection_string: The connection string of the storage account.
    :vartype connection_string: str
    :ivar endpoint_uri: The url of the storage endpoint. It must include the protocol https://.
    :vartype endpoint_uri: str
    :ivar authentication_type: Method used to authenticate against the storage endpoint. Known
     values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of routing storage endpoint.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    :ivar name: The name that identifies this endpoint. Only alphanumeric characters, periods,
     underscores and hyphens are allowed, up to 64 characters. The names events,
     fileNotifications and $default are reserved. Endpoint names must be unique across endpoint
     types. Required.
    :vartype name: str
    :ivar subscription_id: The subscription identifier of the storage account.
    :vartype subscription_id: str
    :ivar resource_group: The name of the resource group of the storage account.
    :vartype resource_group: str
    :ivar container_name: The name of storage container in the storage account. Required.
    :vartype container_name: str
    :ivar file_name_format: File name format for the blob. Default format is
     {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are mandatory but can be
     reordered.
    :vartype file_name_format: str
    :ivar batch_frequency_in_seconds: Time interval at which blobs are written to storage. Must
     be between 60 and 720 seconds; defaults to 300 seconds.
    :vartype batch_frequency_in_seconds: int
    :ivar max_chunk_size_in_bytes: Maximum number of bytes for each blob written to storage. Must
     be between 10485760 (10MB) and 524288000 (500MB); defaults to 314572800 (300MB).
    :vartype max_chunk_size_in_bytes: int
    :ivar encoding: Encoding that is used to serialize messages to blobs. Supported values are
     'avro', 'avrodeflate', and 'JSON'. Default value is 'avro'. Known values are: "Avro",
     "AvroDeflate", and "JSON".
    :vartype encoding: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingStorageContainerPropertiesEncoding
    """

    # Server-side constraints enforced client-side before the request is sent.
    _validation = {
        "name": {"required": True, "pattern": r"^[A-Za-z0-9-._]{1,64}$"},
        "container_name": {"required": True},
        "batch_frequency_in_seconds": {"maximum": 720, "minimum": 60},
        "max_chunk_size_in_bytes": {"maximum": 524288000, "minimum": 10485760},
    }

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "connection_string": {"key": "connectionString", "type": "str"},
        "endpoint_uri": {"key": "endpointUri", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
        "name": {"key": "name", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "container_name": {"key": "containerName", "type": "str"},
        "file_name_format": {"key": "fileNameFormat", "type": "str"},
        "batch_frequency_in_seconds": {"key": "batchFrequencyInSeconds", "type": "int"},
        "max_chunk_size_in_bytes": {"key": "maxChunkSizeInBytes", "type": "int"},
        "encoding": {"key": "encoding", "type": "str"},
    }

    def __init__(
        self,
        *,
        name: str,
        container_name: str,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        connection_string: Optional[str] = None,
        endpoint_uri: Optional[str] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        subscription_id: Optional[str] = None,
        resource_group: Optional[str] = None,
        file_name_format: Optional[str] = None,
        batch_frequency_in_seconds: Optional[int] = None,
        max_chunk_size_in_bytes: Optional[int] = None,
        encoding: Optional[Union[str, "_models.RoutingStorageContainerPropertiesEncoding"]] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword id: Id of the storage container endpoint.
        :paramtype id: str
        :keyword connection_string: The connection string of the storage account.
        :paramtype connection_string: str
        :keyword endpoint_uri: The url of the storage endpoint. It must include the protocol
         https://.
        :paramtype endpoint_uri: str
        :keyword authentication_type: Method used to authenticate against the storage endpoint.
         Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of routing storage endpoint.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        :keyword name: The name that identifies this endpoint. Only alphanumeric characters,
         periods, underscores and hyphens are allowed, up to 64 characters. The names events,
         fileNotifications and $default are reserved. Endpoint names must be unique across
         endpoint types. Required.
        :paramtype name: str
        :keyword subscription_id: The subscription identifier of the storage account.
        :paramtype subscription_id: str
        :keyword resource_group: The name of the resource group of the storage account.
        :paramtype resource_group: str
        :keyword container_name: The name of storage container in the storage account. Required.
        :paramtype container_name: str
        :keyword file_name_format: File name format for the blob. Default format is
         {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are mandatory but can be
         reordered.
        :paramtype file_name_format: str
        :keyword batch_frequency_in_seconds: Time interval at which blobs are written to storage.
         Must be between 60 and 720 seconds; defaults to 300 seconds.
        :paramtype batch_frequency_in_seconds: int
        :keyword max_chunk_size_in_bytes: Maximum number of bytes for each blob written to
         storage. Must be between 10485760 (10MB) and 524288000 (500MB); defaults to 314572800
         (300MB).
        :paramtype max_chunk_size_in_bytes: int
        :keyword encoding: Encoding that is used to serialize messages to blobs. Supported values
         are 'avro', 'avrodeflate', and 'JSON'. Default value is 'avro'. Known values are: "Avro",
         "AvroDeflate", and "JSON".
        :paramtype encoding: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingStorageContainerPropertiesEncoding
        """
        super().__init__(**kwargs)
        # Bulk-assign: every constructor argument maps one-to-one onto an instance attribute.
        for attr, value in (
            ("id", id),
            ("connection_string", connection_string),
            ("endpoint_uri", endpoint_uri),
            ("authentication_type", authentication_type),
            ("identity", identity),
            ("name", name),
            ("subscription_id", subscription_id),
            ("resource_group", resource_group),
            ("container_name", container_name),
            ("file_name_format", file_name_format),
            ("batch_frequency_in_seconds", batch_frequency_in_seconds),
            ("max_chunk_size_in_bytes", max_chunk_size_in_bytes),
            ("encoding", encoding),
        ):
            setattr(self, attr, value)
class RoutingTwin(_serialization.Model):
    """Twin reference input parameter. This is an optional parameter.

    :ivar tags: Twin Tags.
    :vartype tags: JSON
    :ivar properties:
    :vartype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingTwinProperties
    """

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "tags": {"key": "tags", "type": "object"},
        "properties": {"key": "properties", "type": "RoutingTwinProperties"},
    }

    def __init__(
        self,
        *,
        tags: Optional[JSON] = None,
        properties: Optional["_models.RoutingTwinProperties"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword tags: Twin Tags.
        :paramtype tags: JSON
        :keyword properties:
        :paramtype properties: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingTwinProperties
        """
        super().__init__(**kwargs)
        # Straight pass-through of the keyword arguments onto the instance.
        self.properties = properties
        self.tags = tags
class RoutingTwinProperties(_serialization.Model):
    """Desired and reported property documents of a twin, used as routing test input.

    :ivar desired: Twin desired properties.
    :vartype desired: JSON
    :ivar reported: Twin reported properties.
    :vartype reported: JSON
    """
    # Wire-format mapping consumed by the serializer: python attribute -> JSON key/type.
    _attribute_map = {
        "desired": {"key": "desired", "type": "object"},
        "reported": {"key": "reported", "type": "object"},
    }
    def __init__(self, *, desired: Optional[JSON] = None, reported: Optional[JSON] = None, **kwargs: Any) -> None:
        """
        :keyword desired: Twin desired properties.
        :paramtype desired: JSON
        :keyword reported: Twin reported properties.
        :paramtype reported: JSON
        """
        super().__init__(**kwargs)
        self.desired = desired
        self.reported = reported
class SharedAccessSignatureAuthorizationRule(_serialization.Model):
    """The properties of an IoT hub shared access policy.

    All required parameters must be populated in order to send to Azure.

    :ivar key_name: The name of the shared access policy. Required.
    :vartype key_name: str
    :ivar primary_key: The primary key.
    :vartype primary_key: str
    :ivar secondary_key: The secondary key.
    :vartype secondary_key: str
    :ivar rights: The permissions assigned to the shared access policy. Required. Known values
     are: "RegistryRead", "RegistryWrite", "ServiceConnect", "DeviceConnect", "RegistryRead,
     RegistryWrite", "RegistryRead, ServiceConnect", "RegistryRead, DeviceConnect",
     "RegistryWrite, ServiceConnect", "RegistryWrite, DeviceConnect", "ServiceConnect,
     DeviceConnect", "RegistryRead, RegistryWrite, ServiceConnect", "RegistryRead, RegistryWrite,
     DeviceConnect", "RegistryRead, ServiceConnect, DeviceConnect", "RegistryWrite,
     ServiceConnect, DeviceConnect", and "RegistryRead, RegistryWrite, ServiceConnect,
     DeviceConnect".
    :vartype rights: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.AccessRights
    """

    # Server-side constraints enforced client-side before the request is sent.
    _validation = {
        "key_name": {"required": True},
        "rights": {"required": True},
    }

    # Wire-format mapping used by the serializer: python attribute -> JSON key / swagger type.
    _attribute_map = {
        "key_name": {"key": "keyName", "type": "str"},
        "primary_key": {"key": "primaryKey", "type": "str"},
        "secondary_key": {"key": "secondaryKey", "type": "str"},
        "rights": {"key": "rights", "type": "str"},
    }

    def __init__(
        self,
        *,
        key_name: str,
        rights: Union[str, "_models.AccessRights"],
        primary_key: Optional[str] = None,
        secondary_key: Optional[str] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword key_name: The name of the shared access policy. Required.
        :paramtype key_name: str
        :keyword primary_key: The primary key.
        :paramtype primary_key: str
        :keyword secondary_key: The secondary key.
        :paramtype secondary_key: str
        :keyword rights: The permissions assigned to the shared access policy. Required. Known
         values are: "RegistryRead", "RegistryWrite", "ServiceConnect", "DeviceConnect",
         "RegistryRead, RegistryWrite", "RegistryRead, ServiceConnect", "RegistryRead,
         DeviceConnect", "RegistryWrite, ServiceConnect", "RegistryWrite, DeviceConnect",
         "ServiceConnect, DeviceConnect", "RegistryRead, RegistryWrite, ServiceConnect",
         "RegistryRead, RegistryWrite, DeviceConnect", "RegistryRead, ServiceConnect,
         DeviceConnect", "RegistryWrite, ServiceConnect, DeviceConnect", and "RegistryRead,
         RegistryWrite, ServiceConnect, DeviceConnect".
        :paramtype rights: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.AccessRights
        """
        super().__init__(**kwargs)
        # Each keyword argument maps one-to-one onto an instance attribute.
        for attr, value in (
            ("key_name", key_name),
            ("primary_key", primary_key),
            ("secondary_key", secondary_key),
            ("rights", rights),
        ):
            setattr(self, attr, value)
class SharedAccessSignatureAuthorizationRuleListResult(_serialization.Model):
    """Paged collection of shared access policies.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value: The list of shared access policies.
    :vartype value:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.SharedAccessSignatureAuthorizationRule]
    :ivar next_link: The next link.
    :vartype next_link: str
    """

    # next_link is populated by the service and therefore read-only.
    _validation = {"next_link": {"readonly": True}}

    _attribute_map = {
        "value": {"key": "value", "type": "[SharedAccessSignatureAuthorizationRule]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.SharedAccessSignatureAuthorizationRule"]] = None, **kwargs: Any) -> None:
        """
        :keyword value: The list of shared access policies.
        :paramtype value:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.SharedAccessSignatureAuthorizationRule]
        """
        super().__init__(**kwargs)
        self.value = value
        # Read-only on the wire; the server supplies it in responses.
        self.next_link = None
class StorageEndpointProperties(_serialization.Model):
    """The properties of the Azure Storage endpoint for file upload.

    All required parameters must be populated in order to send to Azure.

    :ivar sas_ttl_as_iso8601: The period of time for which the SAS URI generated by IoT Hub for
     file upload is valid. See:
     https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload#file-upload-notification-configuration-options.
    :vartype sas_ttl_as_iso8601: ~datetime.timedelta
    :ivar connection_string: The connection string for the Azure Storage account to which files are
     uploaded. Required.
    :vartype connection_string: str
    :ivar container_name: The name of the root container where you upload files. The container need
     not exist but should be creatable using the connectionString specified. Required.
    :vartype container_name: str
    :ivar authentication_type: Specifies authentication type being used for connecting to the
     storage account. Known values are: "keyBased" and "identityBased".
    :vartype authentication_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
    :ivar identity: Managed identity properties of storage endpoint for file upload.
    :vartype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
    """

    # Fields the serializer requires to be present before sending to the service.
    _validation = {
        "connection_string": {"required": True},
        "container_name": {"required": True},
    }

    # Maps Python attribute names to JSON wire names/types; the timedelta is
    # serialized as an ISO-8601 duration string.
    _attribute_map = {
        "sas_ttl_as_iso8601": {"key": "sasTtlAsIso8601", "type": "duration"},
        "connection_string": {"key": "connectionString", "type": "str"},
        "container_name": {"key": "containerName", "type": "str"},
        "authentication_type": {"key": "authenticationType", "type": "str"},
        "identity": {"key": "identity", "type": "ManagedIdentity"},
    }

    def __init__(
        self,
        *,
        connection_string: str,
        container_name: str,
        sas_ttl_as_iso8601: Optional[datetime.timedelta] = None,
        authentication_type: Optional[Union[str, "_models.AuthenticationType"]] = None,
        identity: Optional["_models.ManagedIdentity"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword sas_ttl_as_iso8601: The period of time for which the SAS URI generated by IoT Hub for
         file upload is valid. See:
         https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload#file-upload-notification-configuration-options.
        :paramtype sas_ttl_as_iso8601: ~datetime.timedelta
        :keyword connection_string: The connection string for the Azure Storage account to which files
         are uploaded. Required.
        :paramtype connection_string: str
        :keyword container_name: The name of the root container where you upload files. The container
         need not exist but should be creatable using the connectionString specified. Required.
        :paramtype container_name: str
        :keyword authentication_type: Specifies authentication type being used for connecting to the
         storage account. Known values are: "keyBased" and "identityBased".
        :paramtype authentication_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.AuthenticationType
        :keyword identity: Managed identity properties of storage endpoint for file upload.
        :paramtype identity: ~azure.mgmt.iothub.v2022_04_30_preview.models.ManagedIdentity
        """
        super().__init__(**kwargs)
        self.sas_ttl_as_iso8601 = sas_ttl_as_iso8601
        self.connection_string = connection_string
        self.container_name = container_name
        self.authentication_type = authentication_type
        self.identity = identity
class SystemData(_serialization.Model):
    """Metadata pertaining to creation and last modification of the resource.

    :ivar created_by: The identity that created the resource.
    :vartype created_by: str
    :ivar created_by_type: The type of identity that created the resource. Known values are:
     "User", "Application", "ManagedIdentity", and "Key".
    :vartype created_by_type: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.CreatedByType
    :ivar created_at: The timestamp of resource creation (UTC).
    :vartype created_at: ~datetime.datetime
    :ivar last_modified_by: The identity that last modified the resource.
    :vartype last_modified_by: str
    :ivar last_modified_by_type: The type of identity that last modified the resource. Known values
     are: "User", "Application", "ManagedIdentity", and "Key".
    :vartype last_modified_by_type: str or
     ~azure.mgmt.iothub.v2022_04_30_preview.models.CreatedByType
    :ivar last_modified_at: The timestamp of resource last modification (UTC).
    :vartype last_modified_at: ~datetime.datetime
    """

    # Maps Python attribute names to JSON wire names/types; datetimes travel
    # as ISO-8601 strings on the wire.
    _attribute_map = {
        "created_by": {"key": "createdBy", "type": "str"},
        "created_by_type": {"key": "createdByType", "type": "str"},
        "created_at": {"key": "createdAt", "type": "iso-8601"},
        "last_modified_by": {"key": "lastModifiedBy", "type": "str"},
        "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"},
        "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"},
    }

    def __init__(
        self,
        *,
        created_by: Optional[str] = None,
        created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None,
        created_at: Optional[datetime.datetime] = None,
        last_modified_by: Optional[str] = None,
        last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None,
        last_modified_at: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword created_by: The identity that created the resource.
        :paramtype created_by: str
        :keyword created_by_type: The type of identity that created the resource. Known values are:
         "User", "Application", "ManagedIdentity", and "Key".
        :paramtype created_by_type: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.CreatedByType
        :keyword created_at: The timestamp of resource creation (UTC).
        :paramtype created_at: ~datetime.datetime
        :keyword last_modified_by: The identity that last modified the resource.
        :paramtype last_modified_by: str
        :keyword last_modified_by_type: The type of identity that last modified the resource. Known
         values are: "User", "Application", "ManagedIdentity", and "Key".
        :paramtype last_modified_by_type: str or
         ~azure.mgmt.iothub.v2022_04_30_preview.models.CreatedByType
        :keyword last_modified_at: The timestamp of resource last modification (UTC).
        :paramtype last_modified_at: ~datetime.datetime
        """
        super().__init__(**kwargs)
        self.created_by = created_by
        self.created_by_type = created_by_type
        self.created_at = created_at
        self.last_modified_by = last_modified_by
        self.last_modified_by_type = last_modified_by_type
        self.last_modified_at = last_modified_at
class TagsResource(_serialization.Model):
    """Wrapper carrying only the ``tags`` of a resource, so callers can update
    the tags on an IoT Hub instance without touching anything else.

    :ivar tags: Resource tags.
    :vartype tags: dict[str, str]
    """

    _attribute_map = {"tags": {"key": "tags", "type": "{str}"}}

    def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
        """
        :keyword tags: Resource tags.
        :paramtype tags: dict[str, str]
        """
        super().__init__(**kwargs)
        self.tags = tags
class TestAllRoutesInput(_serialization.Model):
    """Input for testing all routes.

    :ivar routing_source: Routing source. Known values are: "Invalid", "DeviceMessages",
     "TwinChangeEvents", "DeviceLifecycleEvents", "DeviceJobLifecycleEvents",
     "DigitalTwinChangeEvents", "DeviceConnectionStateEvents", and "MqttBrokerMessages".
    :vartype routing_source: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingSource
    :ivar message: Routing message.
    :vartype message: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingMessage
    :ivar twin: Routing Twin Reference.
    :vartype twin: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingTwin
    """

    # Maps Python attribute names to JSON wire names/types for (de)serialization.
    _attribute_map = {
        "routing_source": {"key": "routingSource", "type": "str"},
        "message": {"key": "message", "type": "RoutingMessage"},
        "twin": {"key": "twin", "type": "RoutingTwin"},
    }

    def __init__(
        self,
        *,
        routing_source: Optional[Union[str, "_models.RoutingSource"]] = None,
        message: Optional["_models.RoutingMessage"] = None,
        twin: Optional["_models.RoutingTwin"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword routing_source: Routing source. Known values are: "Invalid", "DeviceMessages",
         "TwinChangeEvents", "DeviceLifecycleEvents", "DeviceJobLifecycleEvents",
         "DigitalTwinChangeEvents", "DeviceConnectionStateEvents", and "MqttBrokerMessages".
        :paramtype routing_source: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingSource
        :keyword message: Routing message.
        :paramtype message: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingMessage
        :keyword twin: Routing Twin Reference.
        :paramtype twin: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingTwin
        """
        super().__init__(**kwargs)
        self.routing_source = routing_source
        self.message = message
        self.twin = twin
class TestAllRoutesResult(_serialization.Model):
    """Outcome of evaluating a message against every configured route.

    :ivar routes: JSON-serialized array of matched routes.
    :vartype routes: list[~azure.mgmt.iothub.v2022_04_30_preview.models.MatchedRoute]
    """

    _attribute_map = {"routes": {"key": "routes", "type": "[MatchedRoute]"}}

    def __init__(self, *, routes: Optional[List["_models.MatchedRoute"]] = None, **kwargs: Any) -> None:
        """
        :keyword routes: JSON-serialized array of matched routes.
        :paramtype routes: list[~azure.mgmt.iothub.v2022_04_30_preview.models.MatchedRoute]
        """
        super().__init__(**kwargs)
        self.routes = routes
class TestRouteInput(_serialization.Model):
    """Input for testing route.

    All required parameters must be populated in order to send to Azure.

    :ivar message: Routing message.
    :vartype message: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingMessage
    :ivar route: Route properties. Required.
    :vartype route: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteProperties
    :ivar twin: Routing Twin Reference.
    :vartype twin: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingTwin
    """

    # Only the route under test is mandatory; message and twin are optional context.
    _validation = {
        "route": {"required": True},
    }

    # Maps Python attribute names to JSON wire names/types for (de)serialization.
    _attribute_map = {
        "message": {"key": "message", "type": "RoutingMessage"},
        "route": {"key": "route", "type": "RouteProperties"},
        "twin": {"key": "twin", "type": "RoutingTwin"},
    }

    def __init__(
        self,
        *,
        route: "_models.RouteProperties",
        message: Optional["_models.RoutingMessage"] = None,
        twin: Optional["_models.RoutingTwin"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword message: Routing message.
        :paramtype message: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingMessage
        :keyword route: Route properties. Required.
        :paramtype route: ~azure.mgmt.iothub.v2022_04_30_preview.models.RouteProperties
        :keyword twin: Routing Twin Reference.
        :paramtype twin: ~azure.mgmt.iothub.v2022_04_30_preview.models.RoutingTwin
        """
        super().__init__(**kwargs)
        self.message = message
        self.route = route
        self.twin = twin
class TestRouteResult(_serialization.Model):
    """Result of testing one route.

    :ivar result: Result of testing route. Known values are: "undefined", "false", and "true".
    :vartype result: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.TestResultStatus
    :ivar details: Detailed result of testing route.
    :vartype details: ~azure.mgmt.iothub.v2022_04_30_preview.models.TestRouteResultDetails
    """

    # Maps Python attribute names to JSON wire names/types for (de)serialization.
    _attribute_map = {
        "result": {"key": "result", "type": "str"},
        "details": {"key": "details", "type": "TestRouteResultDetails"},
    }

    def __init__(
        self,
        *,
        result: Optional[Union[str, "_models.TestResultStatus"]] = None,
        details: Optional["_models.TestRouteResultDetails"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword result: Result of testing route. Known values are: "undefined", "false", and "true".
        :paramtype result: str or ~azure.mgmt.iothub.v2022_04_30_preview.models.TestResultStatus
        :keyword details: Detailed result of testing route.
        :paramtype details: ~azure.mgmt.iothub.v2022_04_30_preview.models.TestRouteResultDetails
        """
        super().__init__(**kwargs)
        self.result = result
        self.details = details
class TestRouteResultDetails(_serialization.Model):
    """Detailed outcome of testing a single route.

    :ivar compilation_errors: JSON-serialized list of route compilation errors.
    :vartype compilation_errors:
     list[~azure.mgmt.iothub.v2022_04_30_preview.models.RouteCompilationError]
    """

    _attribute_map = {"compilation_errors": {"key": "compilationErrors", "type": "[RouteCompilationError]"}}

    def __init__(self, *, compilation_errors: Optional[List["_models.RouteCompilationError"]] = None, **kwargs: Any) -> None:
        """
        :keyword compilation_errors: JSON-serialized list of route compilation errors.
        :paramtype compilation_errors:
         list[~azure.mgmt.iothub.v2022_04_30_preview.models.RouteCompilationError]
        """
        super().__init__(**kwargs)
        self.compilation_errors = compilation_errors
class UserSubscriptionQuota(_serialization.Model):
    """User subscription quota response.

    :ivar id: IotHub type id.
    :vartype id: str
    :ivar type: Response type.
    :vartype type: str
    :ivar unit: Unit of IotHub type.
    :vartype unit: str
    :ivar current_value: Current number of IotHub type.
    :vartype current_value: int
    :ivar limit: Numerical limit on IotHub type.
    :vartype limit: int
    :ivar name: IotHub type.
    :vartype name: ~azure.mgmt.iothub.v2022_04_30_preview.models.Name
    """

    # Maps Python attribute names to JSON wire names/types for (de)serialization.
    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "unit": {"key": "unit", "type": "str"},
        "current_value": {"key": "currentValue", "type": "int"},
        "limit": {"key": "limit", "type": "int"},
        "name": {"key": "name", "type": "Name"},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,  # pylint: disable=redefined-builtin
        type: Optional[str] = None,
        unit: Optional[str] = None,
        current_value: Optional[int] = None,
        limit: Optional[int] = None,
        name: Optional["_models.Name"] = None,
        **kwargs: Any
    ) -> None:
        """
        :keyword id: IotHub type id.
        :paramtype id: str
        :keyword type: Response type.
        :paramtype type: str
        :keyword unit: Unit of IotHub type.
        :paramtype unit: str
        :keyword current_value: Current number of IotHub type.
        :paramtype current_value: int
        :keyword limit: Numerical limit on IotHub type.
        :paramtype limit: int
        :keyword name: IotHub type.
        :paramtype name: ~azure.mgmt.iothub.v2022_04_30_preview.models.Name
        """
        super().__init__(**kwargs)
        self.id = id
        self.type = type
        self.unit = unit
        self.current_value = current_value
        self.limit = limit
        self.name = name
class UserSubscriptionQuotaListResult(_serialization.Model):
    """Paged collection of user subscription quota entries.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar value:
    :vartype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.UserSubscriptionQuota]
    :ivar next_link:
    :vartype next_link: str
    """

    # next_link is populated by the service and therefore read-only.
    _validation = {"next_link": {"readonly": True}}

    _attribute_map = {
        "value": {"key": "value", "type": "[UserSubscriptionQuota]"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(self, *, value: Optional[List["_models.UserSubscriptionQuota"]] = None, **kwargs: Any) -> None:
        """
        :keyword value:
        :paramtype value: list[~azure.mgmt.iothub.v2022_04_30_preview.models.UserSubscriptionQuota]
        """
        super().__init__(**kwargs)
        self.value = value
        # Read-only on the wire; the server supplies it in responses.
        self.next_link = None
| [
"noreply@github.com"
] | Azure.noreply@github.com |
73f4edde57e72fa7e63e4a92f229752ca1fc8510 | 2ebdbbf06978fd60f47933cfffd37a5a5460ee31 | /Sect-A/source/sect07_class/s722_init_class.py | a5713295190ad89f453cb5131794fdb540b71b11 | [] | no_license | lukejskim/sba19-seoulit | f55dd6279d44a7a235a9fa6c008c7f65045c9d0c | 7652c2b718cb8f7efaeca7c2bdf7a5e699bccbce | refs/heads/master | 2020-07-02T22:21:20.393202 | 2019-09-19T03:03:48 | 2019-09-19T03:03:48 | 201,684,313 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | # 클래스 초기화 함수, __init__() 재정의
class MyClass:
    """Demo class showing how to override the initializer, ``__init__``."""

    def __init__(self, name):
        # Keep the supplied name on the instance for later greetings.
        self.name = name

    def sayHello(self):
        # Greeting text must stay byte-identical to the original output.
        hello = "Hello, " + self.name + "\t It's Good day !"
        print(hello)
# Create an object (instantiate the class).
# myClass = MyClass()  # would fail: __init__ now requires a name argument
myClass = MyClass('채영')
myClass.sayHello()
| [
"bluenine52@gmail.com"
] | bluenine52@gmail.com |
e1c8c6814ceb6e0868c93e4d592affaaddbf37ca | 648861586610635c3ce32241384d3056d311ac2e | /imagepy/core/app/imagej.py | 39cc75c0d84cbc431f3e8190c658bd7ca9dc1e34 | [
"BSD-2-Clause"
] | permissive | bigleftgo/imagepy | 80e55386a8f93324d48ccfe5a95d92229af72cbd | a3a30205ba99eb27884cea0ae4d54b60d4efa829 | refs/heads/master | 2020-09-01T15:21:42.080485 | 2020-06-25T01:35:55 | 2020-06-25T01:35:55 | 218,992,134 | 1 | 0 | NOASSERTION | 2019-11-01T13:38:19 | 2019-11-01T13:38:19 | null | UTF-8 | Python | false | false | 20,147 | py | import wx, os, sys
import time, threading
sys.path.append('../../../')
import wx.lib.agw.aui as aui
from sciwx.widgets import MenuBar, ToolBar, ChoiceBook, ParaDialog, WorkFlowPanel
from sciwx.canvas import CanvasFrame
from sciwx.widgets import ProgressBar
from sciwx.grid import GridFrame
from sciwx.mesh import Canvas3DFrame
from sciwx.text import MDFrame, TextFrame
from sciwx.plot import PlotFrame
from skimage.data import camera
from sciapp import App, Source
from sciapp.object import Image
from imagepy import root_dir
from .startup import load_plugins, load_tools, load_widgets
#from .source import *
class ImageJ(wx.Frame, App):
    def __init__( self, parent ):
        """Build the main ImagePy frame: AUI layout manager, menu bar, toolbar,
        widget pane, status bar; then load all plugins and center the window."""
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = 'ImagePy',
                            size = wx.Size(-1,-1), pos = wx.DefaultPosition,
                            style = wx.RESIZE_BORDER|wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
        App.__init__(self)
        # AUI manager owns the dockable panes (toolbar, widgets, status bar).
        self.auimgr = aui.AuiManager()
        self.auimgr.SetManagedWindow( self )
        self.SetSizeHints( wx.Size(600,-1) )
        logopath = os.path.join(root_dir, 'data/logo.ico')
        self.SetIcon(wx.Icon(logopath, wx.BITMAP_TYPE_ICO))
        self.init_menu()
        self.init_tool()
        self.init_widgets()
        self.init_text()
        self.init_status()
        self._load_all()
        self.Fit()
        self.Layout()
        self.auimgr.Update()
        self.Fit()
        self.Centre( wx.BOTH )
        self.Bind(wx.EVT_CLOSE, self.on_close)
        self.Bind(aui.EVT_AUI_PANE_CLOSE, self.on_pan_close)
        self.source()
    def source(self):
        """Register the default foreground/background colors with the color manager."""
        self.manager('color').add('front', (255, 255, 255))
        self.manager('color').add('back', (0, 0, 0))
    def init_status(self):
        """Create the bottom status bar: an info label, a progress bar, and a
        drag-and-drop target that opens dropped files via the Open macro."""
        self.stapanel = stapanel = wx.Panel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
        sizersta = wx.BoxSizer( wx.HORIZONTAL )
        self.txt_info = wx.StaticText( stapanel, wx.ID_ANY, "ImagePy v0.2", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.txt_info.Wrap( -1 )
        sizersta.Add( self.txt_info, 1, wx.ALIGN_BOTTOM|wx.BOTTOM|wx.LEFT|wx.RIGHT, 2 )
        #self.pro_bar = wx.Gauge( stapanel, wx.ID_ANY, 100, wx.DefaultPosition, wx.Size( 100,15 ), wx.GA_HORIZONTAL )
        self.pro_bar = ProgressBar(stapanel)
        sizersta.Add( self.pro_bar, 0, wx.ALL|wx.ALIGN_CENTER, 0 )
        stapanel.SetSizer(sizersta)
        class OpenDrop(wx.FileDropTarget):
            # Drop target: each dropped path becomes an "Open" macro command.
            def __init__(self, app):
                wx.FileDropTarget.__init__(self)
                self.app = app
            def OnDropFiles(self, x, y, path):
                # Backslashes are normalized so the macro string stays valid.
                self.app.run_macros(["Open>{'path':'%s'}"%i.replace('\\', '/') for i in path])
        stapanel.SetDropTarget(OpenDrop(self))
        # Fixed-height (20px) pane docked at the bottom of the frame.
        self.auimgr.AddPane( stapanel, aui.AuiPaneInfo() .Bottom() .CaptionVisible( False ).PinButton( True )
            .PaneBorder( False ).Dock().Resizable().FloatingSize( wx.DefaultSize ).DockFixed( True )
            . MinSize(wx.Size(-1, 20)). MaxSize(wx.Size(-1, 20)).Layer( 10 ) )
    def _load_all(self):
        """Load plugins, tools and widgets from disk, translate pane captions to
        the configured language, and show a log window if any module failed."""
        lang = Source.manager('config').get('language')
        dic = Source.manager('dictionary').get('common', tag=lang) or {}
        self.auimgr.GetPane(self.widgets).Caption('Widgets')
        # Translate every AUI pane caption using the 'common' dictionary.
        for i in self.auimgr.GetAllPanes():
            i.Caption(dic[i.caption] if i.caption in dic else i.caption)
        self.auimgr.Update()
        plgs, errplg = load_plugins()
        self.load_menu(plgs)
        dtool = Source.manager('tools').get('default')
        tols, errtol = load_tools()
        self.load_tool(tols, dtool or 'Transform')
        wgts, errwgt = load_widgets()
        self.load_widget(wgts)
        # Collect loader failures from all three phases into one report.
        err = errplg + errtol + errwgt
        if len(err)>0:
            err = [('File', 'Name', 'Error')] + err
            cont = '\n'.join(['%-30s\t%-20s\t%s'%i for i in err])
            self.show_txt(cont, 'loading error log')
def load_all(self): wx.CallAfter(self._load_all)
    def load_menu(self, data):
        """Rebuild the menu bar from plugin tree *data*, translate item labels,
        and install the accelerator (shortcut) table returned by the loader."""
        self.menubar.clear()
        lang = Source.manager('config').get('language')
        ls = Source.manager('dictionary').gets(tag=lang)
        # NOTE(review): 'short' is fetched but never used below — shortcuts do
        # not appear to be applied here; confirm whether this is dead code.
        short = Source.manager('shortcut').gets()
        acc = self.menubar.load(data)
        self.translate(dict([(i,j[i]) for i,j,_ in ls]))(self.menubar)
        self.SetAcceleratorTable(acc)
    def load_tool(self, data, default=None):
        """Rebuild the toolbar groups from *data*; *default* names the tool set
        shown in the drop-down button (translated when a dictionary entry exists)."""
        self.toolbar.clear()
        lang = Source.manager('config').get('language')
        ls = Source.manager('dictionary').gets(tag=lang)
        dic = dict([(i,j[i]) for i,j,_ in ls])
        for i, (name, tols) in enumerate(data[1]):
            name = dic[name] if name in dic else name
            # Only the first group is expanded/selected initially (i==0).
            self.toolbar.add_tools(name, tols, i==0)
        default = dic[default] if default in dic else default
        if not default is None: self.toolbar.add_pop(os.path.join(root_dir, 'tools/drop.gif'), default)
        self.toolbar.Layout()
    def load_widget(self, data):
        """Rebuild the widget choice-book from *data* and translate each page's
        title and contents with its own per-page dictionary."""
        self.widgets.clear()
        lang = Source.manager('config').get('language')
        self.widgets.load(data)
        for cbk in self.widgets.GetChildren():
            for i in range(cbk.GetPageCount()):
                dic = Source.manager('dictionary').get(cbk.GetPageText(i), tag=lang) or {}
                translate = self.translate(dic)
                title = cbk.GetPageText(i)
                cbk.SetPageText(i, dic[title] if title in dic else title)
                self.translate(dic)(cbk.GetPage(i))
        # self.translate(self.widgets)
    def init_menu(self):
        """Create the (initially empty) menu bar; plugins fill it in later."""
        self.menubar = MenuBar(self)
    def init_tool(self):
        """Create the top toolbar pane and wire its help callback to show the
        localized markdown document for the hovered tool."""
        sizer = wx.BoxSizer(wx.VERTICAL)
        self.toolbar = ToolBar(self, False)
        def on_help(evt, tol):
            # Look up the tool's document in the configured language.
            lang = Source.manager('config').get('language')
            doc = Source.manager('document').get(tol.title, tag=lang)
            self.show_md(doc or 'No Document!', tol.title)
        self.toolbar.on_help = on_help
        self.toolbar.Fit()
        # Fixed-height pane docked at the top; cannot be re-docked elsewhere.
        self.auimgr.AddPane(self.toolbar, aui.AuiPaneInfo() .Top() .PinButton( True ).PaneBorder( False )
            .CaptionVisible( False ).Dock().FloatingSize( wx.DefaultSize ).MinSize(wx.Size( -1,34 )).DockFixed( True )
            . BottomDockable( False ).TopDockable( False ).Layer( 10 ) )
    def set_background(self, img):
        """Draw PNG *img* centered on the canvas notebook's empty background by
        installing a custom AUI dock-art provider."""
        class ImgArtProvider(aui.AuiDefaultDockArt):
            def __init__(self, img):
                aui.AuiDefaultDockArt.__init__(self)
                self.bitmap = wx.Bitmap(img, wx.BITMAP_TYPE_PNG)
            def DrawBackground(self, dc, window, orient, rect):
                # Default background first, then blit the logo centered in rect.
                aui.AuiDefaultDockArt.DrawBackground(self, dc, window, orient, rect)
                memDC = wx.MemoryDC()
                memDC.SelectObject(self.bitmap)
                w, h = self.bitmap.GetWidth(), self.bitmap.GetHeight()
                dc.Blit((rect[2]-w)//2, (rect[3]-h)//2, w, h, memDC, 0, 0, wx.COPY, True)
        # NOTE(review): self.canvasnb is not assigned anywhere in the visible
        # part of this class — presumably created elsewhere; confirm.
        self.canvasnb.GetAuiManager().SetArtProvider(ImgArtProvider(img))
def add_task(self, task):
self.task_manager.add(task.title, task)
tasks = self.task_manager.gets()
tasks = [(p.title, lambda t=p:p.prgs) for n,p,t in tasks]
self.pro_bar.SetValue(tasks)
def remove_task(self, task):
self.task_manager.remove(obj=task)
tasks = self.task_manager.gets()
tasks = [(p.title, lambda t=p:p.prgs) for n,p,t in tasks]
self.pro_bar.SetValue(tasks)
    def init_widgets(self):
        """Create the right-docked (initially hidden) widgets choice-book pane."""
        self.widgets = ChoiceBook(self)
        self.auimgr.AddPane( self.widgets, aui.AuiPaneInfo() .Right().Caption('Widgets') .PinButton( True ).Hide()
            .Float().Resizable().FloatingSize( wx.DefaultSize ).MinSize( wx.Size( 266,300 ) ).Layer( 10 ) )
def init_text(self): return
#self.mdframe = MDNoteFrame(self, 'Sci Document')
#self.txtframe = TextNoteFrame(self, 'Sci Text')
    def on_pan_close(self, event):
        """AUI pane close handler: veto closing of the core toolbar/widgets
        panes, and give any pane with a close() hook a chance to clean up."""
        if event.GetPane().window in [self.toolbar, self.widgets]:
            event.Veto()
        if hasattr(event.GetPane().window, 'close'):
            event.GetPane().window.close()
    def on_new_img(self, event):
        """Canvas frame activated: register its image and window as current."""
        self.add_img(event.GetEventObject().canvas.image)
        self.add_img_win(event.GetEventObject().canvas)
    def on_close_img(self, event):
        """Canvas frame closing: unhook its activate handler and deregister
        both the window and its image."""
        event.GetEventObject().Bind(wx.EVT_ACTIVATE, None)
        self.remove_img_win(event.GetEventObject().canvas)
        self.remove_img(event.GetEventObject().canvas.image)
        event.Skip()
    def on_new_tab(self, event):
        """Grid frame activated: register its table and window as current."""
        self.add_tab(event.GetEventObject().grid.table)
        self.add_tab_win(event.GetEventObject().grid)
    def on_close_tab(self, event):
        """Grid frame closing: deregister the window and its table."""
        self.remove_tab_win(event.GetEventObject().grid)
        self.remove_tab(event.GetEventObject().grid.table)
        event.Skip()
    def on_new_mesh(self, event):
        """3D canvas frame activated: register its mesh and window as current."""
        self.add_mesh(event.GetEventObject().canvas.mesh)
        self.add_mesh_win(event.GetEventObject().canvas)
    def on_close_mesh(self, event):
        """3D canvas frame closing: deregister its mesh and window."""
        self.remove_mesh(event.GetEventObject().canvas.mesh)
        self.remove_mesh_win(event.GetEventObject().canvas)
        event.Skip()
    def set_info(self, value):
        """Show *value* in the status-bar label, translating it first if it
        appears in any dictionary for the configured language (thread-safe)."""
        lang = Source.manager('config').get('language')
        dics = Source.manager('dictionary').gets(tag=lang)
        # Flatten every dictionary's entries into one lookup table.
        dic = dict(j for i in dics for j in i[1].items())
        value = dic[value] if value in dic else value
        wx.CallAfter(self.txt_info.SetLabel, value)
    def set_progress(self, value):
        """Update the status-bar progress gauge; value is clamped to [0, 100]
        and the special value -1 hides the gauge."""
        v = max(min(value, 100), 0)
        self.pro_bar.SetValue(v)
        if value==-1:
            self.pro_bar.Hide()
        elif not self.pro_bar.IsShown():
            self.pro_bar.Show()
        self.stapanel.GetSizer().Layout()
        self.pro_bar.Update()
    def on_close(self, event):
        """Frame close handler: tear down the AUI manager, persist the config,
        destroy the frame, then terminate the process."""
        print('close')  # NOTE(review): leftover debug output
        #ConfigManager.write()
        self.auimgr.UnInit()
        del self.auimgr
        self.Destroy()
        Source.manager('config').write()
        sys.exit()
    def _show_img(self, img, title=None):
        """Open a new canvas frame for *img*; when *title* is given, *img* is
        treated as a multi-slice stack (set_imgs) named *title*."""
        cframe = CanvasFrame(self, True)
        canvas = cframe.canvas
        if not title is None:
            canvas.set_imgs(img)
            canvas.image.name = title
        else: canvas.set_img(img)
        cframe.Bind(wx.EVT_ACTIVATE, self.on_new_img)
        cframe.Bind(wx.EVT_CLOSE, self.on_close_img)
        cframe.Show()
    def show_img(self, img, title=None):
        """Thread-safe wrapper: schedule _show_img on the wx main loop."""
        wx.CallAfter(self._show_img, img, title)
    def _show_table(self, tab, title):
        """Open a new grid frame showing table data *tab*, optionally renamed
        to *title*."""
        cframe = GridFrame(self)
        grid = cframe.grid
        grid.set_data(tab)
        if not title is None:
            grid.table.name = title
        cframe.Bind(wx.EVT_ACTIVATE, self.on_new_tab)
        cframe.Bind(wx.EVT_CLOSE, self.on_close_tab)
        cframe.Show()
    def show_table(self, tab, title=None):
        """Thread-safe wrapper: schedule _show_table on the wx main loop."""
        wx.CallAfter(self._show_table, tab, title)
    def show_plot(self, title):
        """Create and return a new plot frame titled *title*.

        NOTE(review): unlike the other show_* methods this does not call
        Show() — presumably the caller shows it after plotting; confirm.
        """
        fig = PlotFrame(self)
        fig.figure.title = title
        return fig
    def _show_md(self, cont, title='ImagePy'):
        """Open a markdown viewer frame rendering *cont* under *title*."""
        mdframe = MDFrame(self)
        mdframe.SetIcon(self.GetIcon())
        mdframe.set_cont(cont)
        mdframe.mdpad.title = title
        mdframe.Show(True)
    def show_md(self, cont, title='ImagePy'):
        """Thread-safe wrapper: schedule _show_md on the wx main loop."""
        wx.CallAfter(self._show_md, cont, title)
    def _show_workflow(self, cont, title='ImagePy'):
        """Open a floating workflow panel for *cont*; clicking a step runs it
        as a parameterless macro."""
        pan = WorkFlowPanel(self)
        pan.SetValue(cont)
        info = aui.AuiPaneInfo(). DestroyOnClose(True). Left(). Caption(title)  .PinButton( True ) \
            .Resizable().FloatingSize( wx.DefaultSize ).Dockable(False).Float().Top().Layer( 5 )
        pan.Bind(None, lambda x:self.run_macros(['%s>None'%x]))
        self.auimgr.AddPane(pan, info)
        self.auimgr.Update()
    def show_workflow(self, cont, title='ImagePy'):
        """Thread-safe wrapper: schedule _show_workflow on the wx main loop."""
        wx.CallAfter(self._show_workflow, cont, title)
    def _show_txt(self, cont, title='ImagePy'):
        """Open a plain-text viewer frame showing *cont* under *title*."""
        TextFrame(self, title, cont).Show()
    def show_txt(self, cont, title='ImagePy'):
        """Thread-safe wrapper: schedule _show_txt on the wx main loop."""
        wx.CallAfter(self._show_txt, cont, title)
    def _show_mesh(self, mesh=None, title=None):
        """Show a 3D mesh. Three cases: no mesh -> open an empty 'Surface'
        canvas; an object with a ``vts`` attribute -> add it as a surface to
        the current 3D canvas (creating one if needed); otherwise -> open a
        new canvas displaying *mesh* itself."""
        if mesh is None:
            cframe = Canvas3DFrame(self)
            canvas = cframe.canvas
            canvas.mesh.name = 'Surface'
        elif hasattr(mesh, 'vts'):
            canvas = self.get_mesh_win()
            if canvas is None:
                cframe = Canvas3DFrame(self)
                canvas = cframe.canvas
                canvas.mesh.name = 'Surface'
            canvas.add_surf(title, mesh)
        else:
            cframe = Canvas3DFrame(self)
            canvas = cframe.canvas
            canvas.set_mesh(mesh)
        canvas.GetParent().Show()
        canvas.GetParent().Bind(wx.EVT_ACTIVATE, self.on_new_mesh)
        canvas.GetParent().Bind(wx.EVT_CLOSE, self.on_close_mesh)
        self.add_mesh(canvas.mesh)
        self.add_mesh_win(canvas)
    def show_mesh(self, mesh=None, title=None):
        """Thread-safe wrapper: schedule _show_mesh on the wx main loop."""
        wx.CallAfter(self._show_mesh, mesh, title)
def show_widget(self, panel, title='Widgets'):
print(self.stapanel.GetSize(), '===========')
obj = self.manager('widget').get(panel.title)
if obj is None:
obj = panel(self, self)
self.manager('widget').add(panel.title, obj)
self.auimgr.AddPane(obj, aui.AuiPaneInfo().Caption(title).Left().Layer( 15 ).PinButton( True )
.Float().Resizable().FloatingSize( wx.DefaultSize ).Dockable(True)) #.DestroyOnClose())
lang = Source.manager('config').get('language')
dic = Source.manager('dictionary').get(obj.title, tag=lang) or {}
info = self.auimgr.GetPane(obj)
info.Show(True).Caption(dic[obj.title] if obj.title in dic else obj.title)
self.translate(dic)(obj)
self.Layout()
self.auimgr.Update()
print(self.stapanel.GetSize(), '===========')
    def switch_widget(self, visible=None):
        """Show/hide the widgets pane; None toggles the current state."""
        info = self.auimgr.GetPane(self.widgets)
        info.Show(not info.IsShown() if visible is None else visible)
        self.auimgr.Update()
    def switch_toolbar(self, visible=None):
        """Show/hide the toolbar pane; None toggles the current state."""
        info = self.auimgr.GetPane(self.toolbar)
        info.Show(not info.IsShown() if visible is None else visible)
        self.auimgr.Update()
    def switch_table(self, visible=None):
        """Show/hide the table pane; None toggles the current state.

        NOTE(review): self.tablenbwrap is never assigned in the visible part
        of this class — verify it exists before this is called.
        """
        info = self.auimgr.GetPane(self.tablenbwrap)
        info.Show(not info.IsShown() if visible is None else visible)
        self.auimgr.Update()
    def close_img(self, name=None):
        """Close the image window called *name*, or every image window when
        *name* is None, deregistering each image and window first.

        NOTE(review): relies on self.canvasnb, which is not assigned in the
        visible part of this class — confirm it exists.
        """
        names = self.get_img_name() if name is None else [name]
        for name in names:
            idx = self.canvasnb.GetPageIndex(self.get_img_win(name))
            self.remove_img(self.get_img_win(name).image)
            self.remove_img_win(self.get_img_win(name))
            self.canvasnb.DeletePage(idx)
    def close_table(self, name=None):
        """Close the table window called *name*, or every table window when
        *name* is None, deregistering each table and window first.

        NOTE(review): relies on self.tablenb, which is not assigned in the
        visible part of this class — confirm it exists.
        """
        names = self.get_tab_name() if name is None else [name]
        for name in names:
            idx = self.tablenb.GetPageIndex(self.get_tab_win(name))
            self.remove_tab(self.get_tab_win(name).table)
            self.remove_tab_win(self.get_tab_win(name))
            self.tablenb.DeletePage(idx)
    def record_macros(self, cmd):
        """Append *cmd* to the Macros Recorder widget, but only when the
        recorder pane exists and is currently shown."""
        obj = self.manager('widget').get(name='Macros Recorder')
        if obj is None or not obj.IsShown(): return
        wx.CallAfter(obj.write, cmd)
    def run_macros(self, cmd, callafter=None):
        """Run a list of macro strings of the form 'Title>params' sequentially:
        each plugin's completion callback starts the next one; *callafter*
        runs after the final command."""
        cmds = [i for i in cmd]
        def one(cmds, after):
            cmd = cmds.pop(0)
            title, para = cmd.split('>')
            print(title, para)  # NOTE(review): leftover debug output
            plg = Source.manager('plugin').get(name=title)()
            after = lambda cmds=cmds: one(cmds, one)
            if len(cmds)==0: after = callafter
            # SECURITY NOTE: eval() on the macro parameter string — macros from
            # untrusted sources (e.g. dropped files) can execute arbitrary code.
            wx.CallAfter(plg.start, self, eval(para), after)
        one(cmds, None)
def show(self, tag, cont, title):
tag = tag or 'img'
if tag=='img':
self.show_img([cont], title)
elif tag=='imgs':
self.show_img(cont, title)
elif tag=='tab':
self.show_table(cont, title)
elif tag=='mc':
self.run_macros(cont)
elif tag=='md':
self.show_md(cont, title)
elif tag=='wf':
self.show_workflow(cont, title)
else: self.alert('no view for %s!'%tag)
    def info(self, cont):
        """Set the status-bar label to *cont* verbatim (no translation,
        unlike set_info), thread-safely."""
        wx.CallAfter(self.txt_info.SetLabel, cont)
    def _alert(self, info, title='ImagePy'):
        """Show a modal OK message box with text *info*."""
        dialog=wx.MessageDialog(self, info, title, wx.OK)
        dialog.ShowModal() == wx.ID_OK
        dialog.Destroy()
    def alert(self, info, title='ImagePy'):
        """Thread-safe wrapper: schedule _alert on the wx main loop."""
        wx.CallAfter(self._alert, info, title)
def yes_no(self, info, title='ImagePy'):
dialog = wx.MessageDialog(self, info, title, wx.YES_NO | wx.CANCEL)
rst = dialog.ShowModal()
dialog.Destroy()
dic = {wx.ID_YES:'yes', wx.ID_NO:'no', wx.ID_CANCEL:'cancel'}
return dic[rst]
def getpath(self, title, filt, io, name=''):
filt = '|'.join(['%s files (*.%s)|*.%s'%(i.upper(),i,i) for i in filt])
dic = {'open':wx.FD_OPEN, 'save':wx.FD_SAVE}
dialog = wx.FileDialog(self, title, '', name, filt, dic[io])
rst = dialog.ShowModal()
path = dialog.GetPath() if rst == wx.ID_OK else None
dialog.Destroy()
return path
def show_para(self, title, view, para, on_handle=None, on_ok=None,
on_cancel=None, on_help=None, preview=False, modal=True):
lang = Source.manager('config').get('language')
dic = Source.manager('dictionary').get(name=title, tag=lang)
dialog = ParaDialog(self, title)
dialog.init_view(view, para, preview, modal=modal,
app=self, translate=self.translate(dic))
dialog.Bind('cancel', on_cancel)
dialog.Bind('parameter', on_handle)
dialog.Bind('commit', on_ok)
dialog.Bind('help', on_help)
return dialog.show()
def translate(self, dic):
dic = dic or {}
if isinstance(dic, list):
dic = dict(j for i in dic for j in i.items())
def lang(x): return dic[x] if x in dic else x
def trans(frame):
if hasattr(frame, 'GetChildren'):
for i in frame.GetChildren(): trans(i)
if isinstance(frame, wx.MenuBar):
for i in frame.GetMenus(): trans(i[0])
for i in range(frame.GetMenuCount()):
frame.SetMenuLabel(i, lang(frame.GetMenuLabel(i)))
return 'not set title'
if isinstance(frame, wx.Menu):
for i in frame.GetMenuItems(): trans(i)
return 'not set title'
if isinstance(frame, wx.MenuItem):
frame.SetItemLabel(lang(frame.GetItemLabel()))
trans(frame.GetSubMenu())
if isinstance(frame, wx.Button):
frame.SetLabel(lang(frame.GetLabel()))
if isinstance(frame, wx.CheckBox):
frame.SetLabel(lang(frame.GetLabel()))
if isinstance(frame, wx.StaticText):
frame.SetLabel(lang(frame.GetLabel()))
if hasattr(frame, 'SetTitle'):
frame.SetTitle(lang(frame.GetTitle()))
if isinstance(frame, wx.MessageDialog):
frame.SetMessage(lang(frame.GetMessage()))
if isinstance(frame, wx.Notebook):
for i in range(frame.GetPageCount()):
frame.SetPageText(i, lang(frame.GetPageText(i)))
if hasattr(frame, 'Layout'): frame.Layout()
return trans
if __name__ == '__main__':
import numpy as np
import pandas as pd
app = wx.App(False)
frame = ImageJ(None)
frame.Show()
frame.show_img([np.zeros((512, 512), dtype=np.uint8)], 'zeros')
#frame.show_img(None)
frame.show_table(pd.DataFrame(np.arange(100).reshape((10,10))), 'title')
'''
frame.show_md('abcdefg', 'md')
frame.show_md('ddddddd', 'md')
frame.show_txt('abcdefg', 'txt')
frame.show_txt('ddddddd', 'txt')
'''
app.MainLoop() | [
"imagepy@sina.com"
] | imagepy@sina.com |
0be0ac7c19336cdca2defea65d64f98699597172 | d5cc0c9f8d94e9d020b3e50c0a125d2041dd3baa | /AttendifySite(Flask)/env/lib/python3.6/site-packages/turicreate/data_structures/sarray_builder.py | 001e211365f0220b1925c0e5a6334dd1fb301ca3 | [
"MIT"
] | permissive | arnavgup/Attendify_iOS | be896579de4560cff36a4b163384d0eeabbb7dd9 | c2efc3273a7b99c09d918567718ac87d7f0179d8 | refs/heads/master | 2022-10-31T13:16:11.081902 | 2018-12-09T00:11:42 | 2018-12-09T00:11:42 | 158,432,022 | 3 | 2 | MIT | 2022-10-10T10:53:53 | 2018-11-20T18:10:16 | Swift | UTF-8 | Python | false | false | 4,372 | py | # -*- coding: utf-8 -*-
# Copyright © 2017 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
'''
An interface for creating an SArray over time.
'''
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
from ..cython.cy_sarray_builder import UnitySArrayBuilderProxy
from .sarray import SArray
class SArrayBuilder(object):
"""
An interface to incrementally build an SArray element by element.
Once closed, the SArray cannot be "reopened" using this interface.
Parameters
----------
num_segments : int, optional
Number of segments that can be written in parallel.
history_size : int, optional
The number of elements to be cached as history. Caches the last
`history_size` elements added with `append` or `append_multiple`.
dtype : type, optional
The type the resulting SArray will be. If None, the resulting SArray
will take on the type of the first non-None value it receives.
Returns
-------
out : SArrayBuilder
Examples
--------
>>> from turicreate import SArrayBuilder
>>> sb = SArrayBuilder()
>>> sb.append(1)
>>> sb.append([2,3])
>>> sb.close()
dtype: int
Rows: 3
[1, 2, 3]
"""
def __init__(self, dtype, num_segments=1, history_size=10):
self._builder = UnitySArrayBuilderProxy()
if dtype is None:
dtype = type(None)
self._builder.init(num_segments, history_size, dtype)
self._block_size = 1024
def append(self, data, segment=0):
"""
Append a single element to an SArray.
Throws a RuntimeError if the type of `data` is incompatible with
the type of the SArray.
Parameters
----------
data : any SArray-supported type
A data element to add to the SArray.
segment : int
The segment to write this element. Each segment is numbered
sequentially, starting with 0. Any value in segment 1 will be after
any value in segment 0, and the order of elements in each segment is
preserved as they are added.
"""
self._builder.append(data, segment)
def append_multiple(self, data, segment=0):
"""
Append multiple elements to an SArray.
Throws a RuntimeError if the type of `data` is incompatible with
the type of the SArray.
Parameters
----------
data : any SArray-supported type
A data element to add to the SArray.
segment : int
The segment to write this element. Each segment is numbered
sequentially, starting with 0. Any value in segment 1 will be after
any value in segment 0, and the order of elements in each segment is
preserved as they are added.
"""
if not hasattr(data, '__iter__'):
raise TypeError("append_multiple must be passed an iterable object")
tmp_list = []
for i in data:
tmp_list.append(i)
if len(tmp_list) >= self._block_size:
self._builder.append_multiple(tmp_list, segment)
tmp_list = []
if len(tmp_list) > 0:
self._builder.append_multiple(tmp_list, segment)
def get_type(self):
"""
The type the result SArray will be if `close` is called.
"""
return self._builder.get_type()
def read_history(self, num=10, segment=0):
"""
Outputs the last `num` elements that were appended either by `append` or
`append_multiple`.
Returns
-------
out : list
"""
if num < 0:
num = 0
if segment < 0:
raise TypeError("segment must be >= 0")
return self._builder.read_history(num, segment)
def close(self):
"""
Creates an SArray from all values that were appended to the
SArrayBuilder. No function that appends data may be called after this
is called.
Returns
-------
out : SArray
"""
return SArray(_proxy=self._builder.close())
| [
"gyao@andrew.cmu.edu"
] | gyao@andrew.cmu.edu |
bbe13c6fc51db82a0f3ec393b683be87f37be48d | e217f190b316071ca83892c9e25385d69f5bb22f | /pytext/config/serialize.py | 7dedb7a3f92de998db748a2ab23a4ffd2a0615f2 | [
"BSD-3-Clause"
] | permissive | VonRosenchild/pytext | 49bb2ef3fce3f23ac66b7dd061f133fd4f49e618 | 97f0d503a354a536bbd9ef8913bed5155d3801de | refs/heads/master | 2020-06-29T09:30:14.583895 | 2019-08-03T01:09:20 | 2019-08-03T01:12:29 | 200,499,490 | 1 | 1 | NOASSERTION | 2019-08-04T13:58:53 | 2019-08-04T13:58:52 | null | UTF-8 | Python | false | false | 9,106 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from enum import Enum
from typing import Dict, List, Tuple, Union
from .component import Registry
from .config_adapter import upgrade_to_latest
from .pytext_config import PyTextConfig
class ConfigParseError(Exception):
pass
class UnionTypeError(ConfigParseError):
pass
class EnumTypeError(ConfigParseError):
pass
class MissingValueError(ConfigParseError):
pass
class IncorrectTypeError(Exception):
pass
def _canonical_typename(cls):
if cls.__name__.endswith(".Config"):
return cls.__name__[: -len(".Config")]
return cls.__name__
def _extend_tuple_type(cls, value):
sub_cls_list = list(cls.__args__)
if len(sub_cls_list) != len(value):
if len(sub_cls_list) != 2 or sub_cls_list[1] is not Ellipsis:
raise ConfigParseError(
f"{len(value)} values found which is more than number of types in tuple {cls}"
)
del sub_cls_list[1]
sub_cls_list.extend((cls.__args__[0],) * (len(value) - len(sub_cls_list)))
return sub_cls_list
def _union_from_json(subclasses, json_obj):
if not _is_dict(json_obj):
raise IncorrectTypeError(
f"incorrect Union value {json_obj} for union {subclasses}"
)
subclasses_dict = {}
for subclass in subclasses:
if type(None) != subclass:
if getattr(subclass, "__EXPANSIBLE__", False):
children = Registry.subconfigs(subclass)
for child in children:
subclasses_dict[_canonical_typename(child).lower()] = child
else:
subclasses_dict[_canonical_typename(subclass).lower()] = subclass
type_name = list(json_obj)[0].lower()
if len(json_obj) == 1 and type_name in subclasses_dict:
json_obj = next(iter(json_obj.values()))
else:
type_name = next(iter(subclasses_dict))
print(
f"can not find class type in json, trying with first class "
+ f"{type_name} in the union"
)
try:
return _value_from_json(subclasses_dict[type_name], json_obj)
except Exception as e:
raise UnionTypeError(
f"failed to parse union {subclasses} from json payload {json_obj}"
) from e
def _is_optional(cls):
return _get_class_type(cls) == Union and type(None) in cls.__args__
def _enum_from_json(enum_cls, json_obj):
for e in enum_cls:
if e.value == json_obj:
return e
raise EnumTypeError(f"invalid enum value {json_obj} for {enum_cls}")
def _value_from_json(cls, value):
cls_type = _get_class_type(cls)
if value is None:
return value
# Unions must be first because Union explicitly doesn't
# support __subclasscheck__.
# optional with more than 2 classes is treated as Union
elif _is_optional(cls) and len(cls.__args__) == 2:
sub_cls = cls.__args__[0] if type(None) != cls.__args__[0] else cls.__args__[1]
return _value_from_json(sub_cls, value)
# nested config
elif hasattr(cls, "_fields"):
return config_from_json(cls, value)
elif cls_type == Union:
return _union_from_json(cls.__args__, value)
elif issubclass(cls_type, Enum):
return _enum_from_json(cls, value)
elif issubclass(cls_type, List):
sub_cls = cls.__args__[0]
return [_value_from_json(sub_cls, v) for v in value]
elif issubclass(cls_type, Tuple):
return tuple(
_value_from_json(c, v)
for c, v in zip(_extend_tuple_type(cls, value), value)
)
elif issubclass(cls_type, Dict):
sub_cls = cls.__args__[1]
return {key: _value_from_json(sub_cls, v) for key, v in value.items()}
# built in types
return cls(value)
def _is_type_specifier(value):
if not _is_dict(value) or len(value) != 1:
return False
name = next(iter(value))
return name[0] == name[0].upper()
def _try_component_config_from_json(cls, value):
if _is_type_specifier(value):
options = Registry.subconfigs(cls)
type_name = list(value)[0]
for option in options:
if type_name.lower() == _canonical_typename(option).lower():
return _value_from_json(option, value[type_name])
else:
raise Exception(f"could not find specified component class {type_name}")
return None
def pytext_config_from_json(json_obj, ignore_fields=(), auto_upgrade=True):
if auto_upgrade:
json_obj = upgrade_to_latest(json_obj)
return config_from_json(PyTextConfig, json_obj, ignore_fields)
def config_from_json(cls, json_obj, ignore_fields=()):
if getattr(cls, "__EXPANSIBLE__", False):
component_config = _try_component_config_from_json(cls, json_obj)
if component_config:
return component_config
parsed_dict = {}
if not hasattr(cls, "_fields"):
raise IncorrectTypeError(f"{cls} is not a valid config class")
cls_name = getattr(cls, "__name__", cls)
# Non-EXPANSIBLE classes can be found in configs
cls_name_wo_config = cls_name.split(".")[0]
unknown_fields = (
set(json_obj)
- {f[0] for f in cls.__annotations__.items()}
- {cls_name_wo_config}
)
if unknown_fields:
cls_fields = {f[0] for f in cls.__annotations__.items()}
raise ConfigParseError(
f"Unknown fields for class {cls_name} with fields {cls_fields} \
detected in config json: {unknown_fields}"
)
for field, f_cls in cls.__annotations__.items():
value = None
is_optional = _is_optional(f_cls)
if field not in json_obj:
if field in cls._field_defaults:
# if using default value, no conversion is needed
value = cls._field_defaults.get(field)
else:
try:
value = _value_from_json(f_cls, json_obj[field])
except ConfigParseError:
raise
except Exception as e:
raise ConfigParseError(
f"failed to parse {field} to {f_cls} with json payload \
{json_obj[field]}"
) from e
# validate value
if value is None and not is_optional:
raise MissingValueError(
f"missing value for {field} in class {cls_name} with json {json_obj}"
)
parsed_dict[field] = value
return cls(**parsed_dict)
def _value_to_json(cls, value):
cls_type = _get_class_type(cls)
assert _is_optional(cls) or value is not None
if value is None:
return value
# optional with more than 2 classes is treated as Union
elif _is_optional(cls) and len(cls.__args__) == 2:
sub_cls = cls.__args__[0] if type(None) != cls.__args__[0] else cls.__args__[1]
return _value_to_json(sub_cls, value)
elif cls_type == Union or getattr(cls, "__EXPANSIBLE__", False):
real_cls = type(value)
if hasattr(real_cls, "_fields"):
value = config_to_json(real_cls, value)
return {_canonical_typename(real_cls): value}
# nested config
elif hasattr(cls, "_fields"):
return config_to_json(cls, value)
elif issubclass(cls_type, Enum):
return value.value
elif issubclass(cls_type, List):
sub_cls = cls.__args__[0]
return [_value_to_json(sub_cls, v) for v in value]
elif issubclass(cls_type, Tuple):
return tuple(
_value_to_json(c, v) for c, v in zip(_extend_tuple_type(cls, value), value)
)
elif issubclass(cls_type, Dict):
sub_cls = cls.__args__[1]
return {key: _value_to_json(sub_cls, v) for key, v in value.items()}
return value
def config_to_json(cls, config_obj):
json_result = {}
if not hasattr(cls, "_fields"):
raise IncorrectTypeError(f"{cls} is not a valid config class")
for field, f_cls in cls.__annotations__.items():
value = getattr(config_obj, field)
json_result[field] = _value_to_json(f_cls, value)
return json_result
def _get_class_type(cls):
"""
type(cls) has an inconsistent behavior between 3.6 and 3.7 because of
changes in the typing module. We therefore rely on __extra (3.6) and
__origin__ (3.7), present only in classes from typing to extract the origin
of the class for comparison, otherwise default to the type sent directly
:param cls: class to infer
:return: class or in the case of classes from typing module, the real type
(Union, List) of the created object
"""
return getattr(cls, "__extra__", getattr(cls, "__origin__", cls))
def _is_dict(obj):
"""support all dict-like types
"""
return hasattr(obj, "__contains__") and hasattr(obj, "items")
def parse_config(config_json):
"""
Parse PyTextConfig object from parameter string or parameter file
"""
if "config" in config_json:
config_json = config_json["config"]
return pytext_config_from_json(config_json)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
a7906bf87088132f1a08cef90dff1ea329cfa1ed | c849b2d67cb4906d8ba8ea45aa7fce2170bbb46f | /sources/Tosafot Yom Tov/tosafot_yom_tov_on_tahorot.py | c8e933c838128950a497e2bc67e91ec41efe6186 | [] | no_license | BenjaminKozuch/Sefaria-Data | 8d7452ab7efc95f09ca44e658ee8df1ab2ca84a3 | f154d79ed20f907aff8880c684536c22f970a8a5 | refs/heads/master | 2020-05-29T11:04:35.975262 | 2016-04-05T03:00:26 | 2016-04-05T03:00:26 | 54,200,952 | 0 | 0 | null | 2016-03-18T12:53:54 | 2016-03-18T12:53:54 | null | UTF-8 | Python | false | false | 1,840 | py | # -*- coding: utf-8 -*-
import urllib
import urllib2
from urllib2 import URLError, HTTPError
import json
import pdb
import os
import sys
p = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, p)
os.environ['DJANGO_SETTINGS_MODULE'] = "sefaria.settings"
from local_settings import *
sys.path.insert(0, SEFARIA_PROJECT_PATH)
from sefaria.model import *
def post_index(index):
url = SEFARIA_SERVER+'api/v2/raw/index/'+index["title"].replace(" ", "_")
indexJSON = json.dumps(index)
values = {
'json': indexJSON,
'apikey': API_KEY
}
data = urllib.urlencode(values)
req = urllib2.Request(url, data)
try:
response = urllib2.urlopen(req)
print response.read()
except HTTPError, e:
print 'Error code: ', e.code
root = SchemaNode()
root.add_title("Tosafot Yom Tov on Mishnah Tahorot", "en", primary=True)
root.add_title(u"תוספות יום טוב על משנה טהרות", "he", primary=True)
root.key = "tosafot_yom_tov_tahorot"
sections = [("Tahorot", u"טהרות", 1)]
for sec in sections:
if sec[2] == 1:
intro_node = JaggedArrayNode()
intro_node.add_title(sec[0]+", Introduction", "en", primary=True)
intro_node.add_title(sec[1]+u", הקדמה", "he", primary=True)
intro_node.key = 'intro'+sec[0]
intro_node.sectionNames = ["Paragraph"]
intro_node.depth = 1
intro_node.addressTypes = ["Integer"]
root.append(intro_node)
main_node = JaggedArrayNode()
main_node.default = True
main_node.key = "default"
main_node.sectionNames = ["Perek", "Mishnah", "Comment"]
main_node.depth = 3
main_node.addressTypes = ["Integer", "Integer", "Integer"]
root.append(main_node)
root.validate()
index = {
"title": "Tosafot Yom Tov on Mishnah Tahorot",
"categories": ["Commentary2", "Mishnah", "Tosafot Yom Tov"],
"schema": root.serialize()
}
post_index(index)
| [
"skaplan@brandeis.edu"
] | skaplan@brandeis.edu |
3927ef4df50ddce90f329d071647ae915d3e27ae | 78f3fe4a148c86ce9b80411a3433a49ccfdc02dd | /2017/07/millennial-vote-20170731/graphic_config.py | 94b1323068174ac14ed698e7140ffa728e464d7e | [] | no_license | nprapps/graphics-archive | 54cfc4d4d670aca4d71839d70f23a8bf645c692f | fe92cd061730496cb95c9df8fa624505c3b291f8 | refs/heads/master | 2023-03-04T11:35:36.413216 | 2023-02-26T23:26:48 | 2023-02-26T23:26:48 | 22,472,848 | 16 | 7 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | #!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '17ZezeFdg7cYvdo04pV89-AcyMkSSsM_V27_4R2ykne4'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
| [
"ahurt@npr.org"
] | ahurt@npr.org |
ea9de5a0f9467ec173e776c67c0e3726fbc4e972 | 725ce8167897de0ffd42b97b7aefff43686b0d33 | /barbados/caches/tablescan.py | cf8c65e77e36b7438733656b6ae9e607decfbee9 | [] | no_license | vinceblake/barbados | a510424f82d77066b9b6fa0e1d4641cbbeb5f138 | 28b6c691e5c8150f51b8ee57a99239232b1417ef | refs/heads/master | 2023-02-15T07:54:47.381960 | 2021-01-13T02:51:49 | 2021-01-13T02:51:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,261 | py | import json
from barbados.caches import Caches
from barbados.caches.base import CacheBase
from barbados.services.registry import Registry
from barbados.services.cache import Cache
from barbados.serializers import ObjectSerializer
from barbados.models.cocktailmodel import CocktailModel
from barbados.models.inventorymodel import InventoryModel
from barbados.models.ingredientmodel import IngredientModel
from barbados.models.menumodel import MenuModel
from barbados.factories.cocktailfactory import CocktailFactory
from barbados.factories.inventoryfactory import InventoryFactory
from barbados.factories.ingredientfactory import IngredientFactory
from barbados.factories.menufactory import MenuFactory
class TableScanCache(CacheBase):
@property
def cache_key(self):
raise NotImplementedError
@property
def model_class(self):
raise NotImplementedError
@property
def factory_class(self):
raise NotImplementedError
@classmethod
def populate(cls):
"""
Populate the cache with its expected value(s).
:return: None
"""
pgconn = Registry.get_database_connection()
with pgconn.get_session() as session:
cache_objects = []
objects = cls.factory_class.produce_all_objs(session=session)
for result_object in objects:
cache_objects.append(ObjectSerializer.serialize(result_object, 'dict'))
Cache.set(cls.cache_key, json.dumps(cache_objects))
class CocktailScanCache(TableScanCache):
cache_key = 'cocktail_scan_cache'
model_class = CocktailModel
factory_class = CocktailFactory
class IngredientScanCache(TableScanCache):
cache_key = 'ingredient_scan_cache'
model_class = IngredientModel
factory_class = IngredientFactory
class MenuScanCache(TableScanCache):
cache_key = 'menu_scan_cache'
model_class = MenuModel
factory_class = MenuFactory
class InventoryScanCache(TableScanCache):
cache_key = 'inventory_scan_cache'
model_class = InventoryModel
factory_class = InventoryFactory
Caches.register_cache(CocktailScanCache)
Caches.register_cache(IngredientScanCache)
Caches.register_cache(MenuScanCache)
Caches.register_cache(InventoryScanCache)
| [
"grant@grantcohoe.com"
] | grant@grantcohoe.com |
d12fe4bd8d208fa142710b1ec665c2531c3f9215 | 8d67a78a26b455aaeb3bfa5e38de8460e2719437 | /keras/autoencoder/pt4/conv_2.py | d61f122987aa83364e72106a0fd73e26a0bd7a60 | [] | no_license | crvogt/functions_test | bfb5ebf87c86f61f31ad1a9e8fb8c0333385c582 | bce390a68c6caa91d29a23f97edd2ae547d385bc | refs/heads/master | 2023-03-11T10:21:10.013486 | 2023-02-18T05:44:21 | 2023-02-18T05:44:21 | 27,519,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,649 | py | '''
fit_generator ultimately calls train_on_batch which allows for x to be a dictionary.
Also, it could be a list, in which casex is expected to map 1:1 to the inputs defined in Model(input=[in1, ...], ...)
Note that x could also be a list (one input array per input). Both are allowed.
'''
import keras
from keras.layers import Input, Conv2D, MaxPooling2D, Dense, Flatten, Lambda, UpSampling2D
from keras.models import Model
from keras.preprocessing import image
import keras.backend as K
import matplotlib.pyplot as plt
import numpy as np
import random
import tensorflow as tf
import os
from PIL import Image
import sys
def sampling(args):
"""Reparameterization trick by sampling fr an isotropic unit Gaussian.
# Arguments:
args (tensor): mean and log of variance of Q(z|X)
# Returns:
z (tensor): sampled latent vector
"""
z_mean, z_log_var = args
batch = K.shape(z_mean)[0]
dim = K.int_shape(z_mean)[1]
# by default, random_normal has mean=0 and std=1.0
epsilon = K.random_normal(shape=(batch, dim))
return z_mean + K.exp(0.5 * z_log_var) * epsilon
def my_gen():
'''Generator used by keras.models.sequential.fit_generator
to yield batches of data. Such a generator is required.
'''
'''
You might try to put everything in a while 1 loop, and make sure everything
is just constantly shuffling (ie, always a rand set of vertices and internal Points)
'''
dataPath = "/home/carson/libs/keras_tests/"
batch_count = 0
while batch_count < 5:
trainSetIter = random.randint(1,(len(os.listdir(dataPath))))
vertPath = dataPath + "train_set_" + str(trainSetIter) + "/vertices"
internalPath = dataPath + "train_set_" + str(trainSetIter) + "/internal_points"
vertImages = []
#Now get the positions, starting with theta
vert1Pose = []
vert2Pose = []
vert3Pose = []
splitResult = []
for file in os.listdir(vertPath):
if file.endswith(".mat"):
splitResult = file.split("_")
resultVec = []
for file in os.listdir(vertPath):
if file.endswith(".mat"):
splitResult = file.split(".")
resultVec.append(splitResult[0])
individualVec = []
for val in resultVec:
splitResult = val.split("_")
individualVec.append(splitResult)
vertImages.append(individualVec[0][0]+".jpg")
vertImages.append(individualVec[1][0]+".jpg")
vertImages.append(individualVec[2][0]+".jpg")
print(vertImages)
vert1Pose.append(float(individualVec[0][1] + "." + individualVec[0][2]))
vert1Pose.append(float(individualVec[0][3] + "." + individualVec[0][4]))
vert2Pose.append(float(individualVec[1][1] + "." + individualVec[1][2]))
vert2Pose.append(float(individualVec[1][3] + "." + individualVec[1][4]))
vert3Pose.append(float(individualVec[2][1] + "." + individualVec[2][2]))
vert3Pose.append(float(individualVec[2][3] + "." + individualVec[2][4]))
print(vert2Pose)
possibleVals = []
# bodyImg = []
bodyPose = []
#Now do this for a random value in the body directory
for file in os.listdir(internalPath):
if file.endswith(".mat"):
possibleVals.append(file)
#Choose random file
bodyFile = possibleVals[random.randint(0,(len(possibleVals)-1))]
#Now parse it
resultVec = []
splitResult = bodyFile.split(".")
resultVec.append(splitResult[0])
individualVec = []
for val in resultVec:
splitResult = val.split("_")
individualVec.append(splitResult)
bodyImgPath = individualVec[0][0] + ".jpg"
# print(bodyImgPath)
bodyPose.append(float(individualVec[0][1] + "." + individualVec[0][2]))
bodyPose.append(float(individualVec[0][3] + "." + individualVec[0][4]))
#Now we have bodyPose, bodyImg, vert1Pose, vert2Pose, vert3Pose, vertImages
vertImage1 = image.load_img(vertPath + '/' + vertImages[0], grayscale=True)
vertImage1 = image.img_to_array(vertImage1)
vertImage1 = vertImage1/255.
vertImage2 = image.load_img(vertPath + '/' + vertImages[0], grayscale=True)
vertImage2 = image.img_to_array(vertImage1)
vertImage2 = vertImage1/255.
vertImage3 = image.load_img(vertPath + '/' + vertImages[0], grayscale=True)
vertImage3 = image.img_to_array(vertImage1)
vertImage3 = vertImage1/255.
imgLength = vertImage1.shape[0]*vertImage1.shape[1]*vertImage1.shape[2]
imgShape = vertImage1.shape
print(bodyImgPath)
bodyImg = image.load_img(internalPath + '/' + bodyImgPath, grayscale=True)
bodyImg = image.img_to_array(bodyImg)
bodyImg = bodyImg/255.
# vertImage1 = np.reshape(vertImage1, [-1, imgLength])
vertImage1 = np.reshape(vertImage1, [1, imgShape[0], imgShape[1], imgShape[2]])
vertImage2 = np.reshape(vertImage2, [1, imgShape[0], imgShape[1], imgShape[2]])
vertImage3 = np.reshape(vertImage3, [1, imgShape[0], imgShape[1], imgShape[2]])
bodyImg = np.reshape(bodyImg, [1, imgShape[0], imgShape[1], imgShape[2]])
print('bodyImg shape')
print(bodyImg.shape)
vert1Pose = np.array(vert1Pose).reshape(1, 2)
vert2Pose = np.array(vert2Pose).reshape(1, 2)
print(vert2Pose.shape)
vert3Pose = np.array(vert3Pose).reshape(1, 2)
print(vert3Pose.shape)
bodyPose = np.array(bodyPose).reshape(1, 2)
print(bodyPose.shape)
# bodyImg = np.array(bodyImg)
inputList = [vertImage1, vertImage2, vertImage3, vert1Pose, vert2Pose, vert3Pose, bodyPose]
returnTuple = ([inputList, bodyImg])
yield returnTuple
inputList = []
returnTuple = ()
batch_count += 1
h = 480
w = 640
channels = 1
midDim = 1000
latentDim = 10
#Inputs
image1 = Input(shape=(h, w, channels))
image2 = Input(shape=(h, w, channels))
image3 = Input(shape=(h, w, channels))
pose1 = Input(shape=(2,))
pose2 = Input(shape=(2,))
pose3 = Input(shape=(2,))
pose4 = Input(shape=(2,))
encoder_in = Input(shape=(h,w,channels))
x = Conv2D(6, (3, 3), padding='same', activation='relu')(encoder_in)
print(x.shape)
x = MaxPooling2D((2,2))(x)
print(x.shape)
x = Conv2D(8, (1, 1), padding='same', activation='relu')(x)
print(x.shape)
x = MaxPooling2D((2,2))(x)
print(x.shape)
x = Conv2D(10, (1, 1), padding='same', activation='relu')(x)
print(x.shape)
x = MaxPooling2D((2,2))(x)
print(x.shape)
x = Conv2D(12, (1, 1), padding='same', activation='relu')(x)
print(x.shape)
x = MaxPooling2D((2,2))(x)
print(x.shape)
x = Conv2D(20, (1, 1), padding='same', activation='relu')(x)
print(x.shape)
x = MaxPooling2D((2,2))(x)
print(x.shape)
# x = keras.layers.Reshape((1400,))(x)
print('pre flatten shape')
shape = K.int_shape(x)
print(shape)
x = Flatten()(x)
# print(x.shape)
z_mean = Dense(latentDim, name='z_mean')(x)
z_log_var = Dense(latentDim, name='z_log_var')(x)
print(z_mean.shape)
print(z_log_var.shape)
# z = Lambda(sampling, output_shape=(latentDim,), name='z')([z_mean, z_log_var])
encoder_model = Model(encoder_in, [z_mean, z_log_var])
#This ensures the model will be shared, including weights
encoded1 = encoder_model(image1)
encoded2 = encoder_model(image2)
encoded3 = encoder_model(image3)
z1 = Lambda(sampling, output_shape=(latentDim,), name='z1')([encoded1[0], encoded1[1]])
z2 = Lambda(sampling, output_shape=(latentDim,), name='z2')([encoded2[0], encoded2[1]])
z3 = Lambda(sampling, output_shape=(latentDim,), name='z3')([encoded3[0], encoded3[1]])
#Now concatenate
# latent_z = keras.layers.concatenate([encoded1, encoded2, encoded3, pose1, pose2, pose3, pose4])
latent_z = keras.layers.concatenate([z1, z2, z3, pose1, pose2, pose3, pose4])
print('latent_z shape')
print(latent_z.shape)
#Now write the decoder
decoder_in = Dense(shape[1]*shape[2]*shape[3], activation='relu')(latent_z)
x = keras.layers.Reshape((shape[1], shape[2], shape[3]))(decoder_in)
print('decoder in shape')
print(x.shape)
x = Conv2D(20, (1, 1), activation='relu', padding='same')(x) # (8, 8)
x = UpSampling2D((2, 2))(x) # (16, 16)
print(x.shape)
x = Conv2D(12, (1, 1), activation='relu')(x) # (14, 14)
x = UpSampling2D((2, 2))(x) # (28, 28)
print(x.shape)
x = Conv2D(10, (1, 1), padding='same', activation='relu')(x) # (14, 14)
x = UpSampling2D((2, 2))(x) # (28, 28)
print(x.shape)
x = Conv2D(8, (1, 1), padding='same', activation='relu')(x) # (14, 14)
x = UpSampling2D((2, 2))(x) # (28, 28)
print(x.shape)
x = Conv2D(6, (3, 3), padding='same', activation='relu')(x) # (14, 14)
x = UpSampling2D((2, 2))(x) # (28, 28)
print(x.shape)
decoder_out = Conv2D(1, (3, 3), activation='sigmoid', padding='same')(x) # (100, 100)
print('decoder out')
print(decoder_out.shape)
ae = Model([image1, image2, image3, pose1, pose2, pose3, pose4], decoder_out)
# ae = Model(inputs=[image1, image2, image3], outputs=encoded)
trainGenerator = my_gen()
ae.compile(loss='mse', optimizer='adam')
ae.fit_generator(trainGenerator,
steps_per_epoch=1,
epochs=5,
validation_steps=1) | [
"crvogt26@gmail.com"
] | crvogt26@gmail.com |
2aef79885420e7847ba448d8f8a082511c15a162 | 4c852fab792606580acb3f3a61b7f86ae25930b0 | /Python/MIT-CompThinking/MITx600.1x/ProblemSets/theof/printing out all available letters.py | dd1d06b519f64693fb90a40e66cecf0d0e339e61 | [] | no_license | hmchen47/Programming | a9767a78a35c0844a1366391f48b205ff1588591 | 9637e586eee5c3c751c96bfc5bc1d098ea5b331c | refs/heads/master | 2022-05-01T01:57:46.573136 | 2021-08-09T04:29:40 | 2021-08-09T04:29:40 | 118,053,509 | 2 | 1 | null | 2021-09-20T19:54:02 | 2018-01-19T00:06:04 | Python | UTF-8 | Python | false | false | 650 | py | #!/usr/bin/env python
# _*_ coding = UTF-8 _*_
def getAvailableLetters(lettersGuessed):
'''
lettersGuessed: list, what letters have been guessed so far
returns: string, comprised of letters that represents what letters have not
yet been guessed.
'''
# FILL IN YOUR CODE HERE...
L2 = []
import string
for c in string.ascii_lowercase:
L2.append(c)
#print L2
def removeDupsBetter(L1,L2):
L1Start = L1[:]
for e in L1Start:
if e in L2:
L2.remove(e)
return ''.join(str(e) for e in L2)
return removeDupsBetter(lettersGuessed,L2) | [
"h.m.chen@ieee.org"
] | h.m.chen@ieee.org |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.