| hexsha (stringlengths 40-40) | size (int64 4-1.02M) | ext (stringclasses 8 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 4-209) | max_stars_repo_name (stringlengths 5-121) | max_stars_repo_head_hexsha (stringlengths 40-40) | max_stars_repo_licenses (listlengths 1-10) | max_stars_count (int64 1-191k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24-24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24-24 ⌀) | max_issues_repo_path (stringlengths 4-209) | max_issues_repo_name (stringlengths 5-121) | max_issues_repo_head_hexsha (stringlengths 40-40) | max_issues_repo_licenses (listlengths 1-10) | max_issues_count (int64 1-67k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24-24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24-24 ⌀) | max_forks_repo_path (stringlengths 4-209) | max_forks_repo_name (stringlengths 5-121) | max_forks_repo_head_hexsha (stringlengths 40-40) | max_forks_repo_licenses (listlengths 1-10) | max_forks_count (int64 1-105k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24-24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24-24 ⌀) | content (stringlengths 4-1.02M) | avg_line_length (float64 1.07-66.1k) | max_line_length (int64 4-266k) | alphanum_fraction (float64 0.01-1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 5811e10b88b507765a7c6837ee0d3abe51b1f6a5 | 3,625 | py | Python | tensorflow_estimator/python/estimator/tpu/util.py | cyc/estimator | 742a07296c8f584150bb02f97be7207130ded5fd | ["Apache-2.0"] | 3 | 2020-10-12T15:47:01.000Z | 2022-01-14T19:51:26.000Z | tensorflow_estimator/python/estimator/tpu/util.py | cyc/estimator | 742a07296c8f584150bb02f97be7207130ded5fd | ["Apache-2.0"] | 9 | 2020-09-25T22:32:02.000Z | 2022-02-09T23:45:10.000Z | tensorflow_estimator/python/estimator/tpu/util.py | cyc/estimator | 742a07296c8f584150bb02f97be7207130ded5fd | ["Apache-2.0"] | 2 | 2020-08-03T13:02:06.000Z | 2020-11-04T03:15:44.000Z |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================
"""Utilities for the functionalities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
import time
import six
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import training
_ITERATIONS_PER_LOOP_VALUE_REGEX = re.compile(
r'^(?P<value>[1-9]\d*)((?P<suffix>[s|m|h])$|$)')
IterationsPerLoopCounter = collections.namedtuple('IterationsPerLoopCounter',
['value', 'unit'])
def check_positive_integer(value, name):
"""Checks whether `value` is a positive integer."""
if not isinstance(value, six.integer_types):
raise TypeError('{} must be int, got {}'.format(name, type(value)))
if value <= 0:
raise ValueError('{} must be positive, got {}'.format(name, value))
def parse_iterations_per_loop(iterations_per_loop):
"""Parses the `iterations_per_loop` value.
The parser expects the value of the `iterations_per_loop` value to be a
positive integer value with unit:`count` or time-based value `<N><s|m|h>`
where <N> is any positive integer and `s`, `m`, `h` are unit of time in
seconds, minutes, hours respectively. Examples of valid values: `3600s`, `60m`
, `1h`.
Args:
iterations_per_loop: Number of iterations or time alloted to spend on per
device loop.
Returns:
A dictionary of `value` and `unit`. The `unit` value can be either a raw
`count`, or time in `seconds`.
{
"value": <positive-integer>,
"unit": <unit: `count` | `seconds`>
}
"""
m = _ITERATIONS_PER_LOOP_VALUE_REGEX.match(str(iterations_per_loop))
if m is None:
raise ValueError(
'Invalid TPUConfig `iterations_per_loop` value. Value must be positive '
'integer value or time-based value `<N><s|m|h>` where <N> is any '
'positive integer and `s`, `m`, `h` are unit of time in seconds, '
'minutes, hours respectively. Examples of valid values: `3600s`, `60m`,'
' `1h`.')
unit_value = 'seconds' if m.group('suffix') in ['h', 'm', 's'] else 'count'
value = int(m.group('value'))
if m.group('suffix') == 'm':
value *= 60
elif m.group('suffix') == 'h':
value *= 3600
return IterationsPerLoopCounter(value, unit_value)
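# Illustrative parses (added note, not part of the original module):
#   parse_iterations_per_loop('1000') -> IterationsPerLoopCounter(value=1000, unit='count')
#   parse_iterations_per_loop('60m')  -> IterationsPerLoopCounter(value=3600, unit='seconds')
#   parse_iterations_per_loop('1h')   -> IterationsPerLoopCounter(value=3600, unit='seconds')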
# TODO(b/118302029) Remove this copy of MultiHostDatasetInitializerHook after we
# release a tensorflow_estimator with MultiHostDatasetInitializerHook in
# python/estimator/util.py.
class MultiHostDatasetInitializerHook(training.SessionRunHook):
"""Creates a SessionRunHook that initializes all passed iterators."""
def __init__(self, dataset_initializers):
self._initializers = dataset_initializers
def after_create_session(self, session, coord):
del coord
start = time.time()
session.run(self._initializers)
logging.info('Initialized dataset iterators in %d seconds',
time.time() - start)
| 36.616162 | 80 | 0.689655 |
| 38c33bf989a7de80370bd5d1b299754bfbdfed02 | 992 | py | Python | src/line_segmentation.py | MartinEthier/SimpleHTR | 071febbbea419aa822ea4a767d2cbf733ece7724 | ["MIT"] | null | null | null | src/line_segmentation.py | MartinEthier/SimpleHTR | 071febbbea419aa822ea4a767d2cbf733ece7724 | ["MIT"] | null | null | null | src/line_segmentation.py | MartinEthier/SimpleHTR | 071febbbea419aa822ea4a767d2cbf733ece7724 | ["MIT"] | null | null | null |
import numpy as np
import cv2
from matplotlib import pyplot as plt
from Line import Line
def segment_lines(bw_img):
# Calculates bounding boxes for all lines of text inside of image
# Invert binary so text is 1s
bw_inv = cv2.bitwise_not(bw_img)
# dilate to glue individual lines together
kernel = np.ones((5,50), np.uint8) #for post-its
dilated_img = cv2.dilate(bw_inv, kernel, iterations=1)
# Get contours and sort by bounding box length
im2, ctrs, hier = cv2.findContours(dilated_img.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
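# Note (added): the three-value return (im2, ctrs, hier) matches the OpenCV 3.x
# findContours API; OpenCV 4.x returns only (contours, hierarchy).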
#sorted_ctrs = sorted(ctrs, key=lambda ctr: cv2.boundingRect(ctr)[1])
lines = []
for i, ctr in enumerate(ctrs):
# Get bounding box
x, y, w, h = cv2.boundingRect(ctr)
if w * h > 5000:
# Getting line image
line_img = bw_img[y:y+h, x:x+w]
lines.append(Line(line_img, i)) # append line object
return lines
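# Hypothetical usage sketch (added; the file name and Otsu thresholding are assumptions):
# img = cv2.imread('page.png', cv2.IMREAD_GRAYSCALE)
# _, bw = cv2.threshold(img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
# lines = segment_lines(bw)  # one Line object per detected text line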
| 29.176471 | 102 | 0.637097 |
| a16c730d1246874375235150cbdee03a90c24f6b | 1,446 | py | Python | tests/test_config.py | z-tasker/comp-syn | 03333f84ebfea51b4cf55f7ab42ec0eb3f9a2fbb | ["MIT"] | 20 | 2020-04-18T12:49:33.000Z | 2022-01-27T13:36:35.000Z | tests/test_config.py | z-tasker/comp-syn | 03333f84ebfea51b4cf55f7ab42ec0eb3f9a2fbb | ["MIT"] | 7 | 2020-04-27T01:53:58.000Z | 2021-12-16T03:27:07.000Z | tests/test_config.py | z-tasker/comp-syn | 03333f84ebfea51b4cf55f7ab42ec0eb3f9a2fbb | ["MIT"] | 14 | 2020-04-18T12:49:35.000Z | 2022-01-27T13:36:41.000Z |
import os
import pytest
from pathlib import Path
from compsyn.config import CompsynConfig
from compsyn.trial import get_trial_from_env
@pytest.mark.unit
def test_CompsynConfig() -> None:
# capture original environment COMPSYN_ variables so we can put things back after messin' about
original_values = {
key: val for key, val in os.environ.items() if key.startswith("COMPSYN_")
}
config = CompsynConfig(
experiment_name="test-patterns",
trial_id="phase-0",
hostname="pytester",
)
assert os.getenv("COMPSYN_EXPERIMENT_NAME") == "test-patterns"
assert os.getenv("COMPSYN_TRIAL_ID") == "phase-0"
assert os.getenv("COMPSYN_HOSTNAME") == "pytester"
trial = get_trial_from_env()
assert trial.experiment_name == "test-patterns"
assert trial.trial_id == "phase-0"
assert trial.hostname == "pytester"
config = CompsynConfig(
experiment_name="test-patterns",
trial_id="phase-1",
hostname="pytester",
)
print(config)
assert trial.experiment_name == "test-patterns"
assert trial.trial_id == "phase-0"
assert trial.hostname == "pytester"
trial = get_trial_from_env()
assert trial.experiment_name == "test-patterns"
assert trial.trial_id == "phase-1"
assert trial.hostname == "pytester"
# reset original environment values
for key, val in original_values.items():
os.environ[key] = val
| 26.290909 | 99 | 0.679115 |
| 56ac92e5cfda45e6a08469a052f980d19291ec83 | 506 | py | Python | 3.3.9/scrapy_plus/http/response.py | feel-easy/myspider | dcc65032015d7dbd8bea78f846fd3cac7638c332 | ["Apache-2.0"] | 1 | 2019-02-28T10:16:00.000Z | 2019-02-28T10:16:00.000Z | 3.3.9/scrapy_plus/http/response.py | wasalen/myspider | dcc65032015d7dbd8bea78f846fd3cac7638c332 | ["Apache-2.0"] | null | null | null | 3.3.9/scrapy_plus/http/response.py | wasalen/myspider | dcc65032015d7dbd8bea78f846fd3cac7638c332 | ["Apache-2.0"] | null | null | null |
# THE WINTER IS COMING! the old driver will be driving who was a man of the world!
# -*- coding: utf-8 -*- python 3.6.7, created 18-11-30 4:03 PM GMT+8
from lxml import etree
class Response():
def __init__(self, url=None, headers=None, status_code=None, body=None):
self.url = url
self.headers = headers
self.status_code = status_code
self.body = body
def xpath(self, xpath_str):
html = etree.HTML(self.body)
return html.xpath(xpath_str)
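# Hypothetical usage sketch (added; URL and markup are made up):
# resp = Response(url='http://example.com', status_code=200,
#                 body='<html><body><h1>hi</h1></body></html>')
# resp.xpath('//h1/text()')  # -> ['hi']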
| 26.631579 | 82 | 0.648221 |
| f95ea65e2ea95af5d61a28664ae6454ebc82552d | 5,337 | py | Python | dnanexus/peaks_report.py | strattan/test-merge2 | 2bc5a7c94fb06cff163ab3674dbb319e45976d17 | ["MIT"] | 108 | 2015-06-30T19:38:12.000Z | 2022-03-09T06:50:59.000Z | dnanexus/peaks_report.py | strattan/test-merge2 | 2bc5a7c94fb06cff163ab3674dbb319e45976d17 | ["MIT"] | 7 | 2017-06-28T06:43:41.000Z | 2021-09-07T05:33:04.000Z | dnanexus/peaks_report.py | strattan/test-merge2 | 2bc5a7c94fb06cff163ab3674dbb319e45976d17 | ["MIT"] | 59 | 2015-08-29T10:26:24.000Z | 2021-06-25T02:27:20.000Z |
#!/usr/bin/env python
import os, sys, logging, urlparse, requests, csv, StringIO, re, copy
import common
logger = logging.getLogger(__name__)
EPILOG = '''Notes:
Examples:
%(prog)s
'''
def get_args():
import argparse
parser = argparse.ArgumentParser(
description=__doc__, epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('experiments', help='List of ENCSR accessions to report on', nargs='*', default=None)
parser.add_argument('--infile', help='File containing ENCSR accessions', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument('--outfile', help='tsv table of files with metadata', type=argparse.FileType('wb'), default=sys.stdout)
parser.add_argument('--assembly', help='Genome assembly like hg19 or mm10', required=True)
parser.add_argument('--debug', help="Print debug messages", default=False, action='store_true')
parser.add_argument('--key', help="The keypair identifier from the keyfile.", default='www')
parser.add_argument('--keyfile', help="The keyfile.", default=os.path.expanduser("~/keypairs.json"))
args = parser.parse_args()
if args.debug:
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
else: #use the defaulf logging level
logging.basicConfig(format='%(levelname)s:%(message)s')
return args
def biorep_ns(file_accession,server,keypair):
m = re.match('^/?(files)?/?(\w*)', file_accession)
if m:
acc = m.group(2)
else:
return
url = urlparse.urljoin(server, '/files/%s' %(acc))
file_object = common.encoded_get(url, keypair)
if file_object.get('derived_from'):
for f in file_object.get('derived_from'):
for repnum in biorep_ns(f,server,keypair):
yield repnum
else:
url = urlparse.urljoin(server, '%s' %(file_object.get('replicate')))
replicate_object = common.encoded_get(url, keypair)
yield replicate_object.get('biological_replicate_number')
def biorep_ages(file_accession,server,keypair):
m = re.match('^/?(files)?/?(\w*)', file_accession)
if m:
acc = m.group(2)
else:
return
url = urlparse.urljoin(server, '/files/%s' %(acc))
file_object = common.encoded_get(url, keypair)
if file_object.get('derived_from'):
for f in file_object.get('derived_from'):
for bioage in biorep_ages(f,server,keypair):
yield bioage
else:
url = urlparse.urljoin(server, '%s' %(file_object.get('replicate')))
replicate_object = common.encoded_get(url, keypair)
url = urlparse.urljoin(server, '%s' %(replicate_object.get('library')))
library_object = common.encoded_get(url, keypair)
url = urlparse.urljoin(server, '%s' %(library_object.get('biosample')))
biosample_object = common.encoded_get(url, keypair)
yield biosample_object.get('age_display')
def main():
args = get_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
authid, authpw, server = common.processkey(args.key, args.keyfile)
keypair = (authid,authpw)
if args.experiments:
exp_ids = args.experiments
else:
exp_ids = args.infile
for (i, exp_id) in enumerate(exp_ids):
exp_id = exp_id.rstrip()
logger.info('%s' %(exp_id))
url = urlparse.urljoin(server, 'metadata/type=experiment&accession=%s/metadata.tsv' %(exp_id))
r = requests.get(url, auth=keypair)
try:
r.raise_for_status()
except:
logger.error('%s failed to get metadata. GET returned %s' %(exp_id, r.status_code))
logger.debug('%s' %(r.text))
logger.error('Skipping ...')
continue
reader = csv.DictReader(StringIO.StringIO(r.text), delimiter='\t')
fieldnames = copy.copy(reader.fieldnames)
# fieldnames.remove('Biological replicate(s)')
# fieldnames.insert(4,'Biological replicate(s)')
# fieldnames.remove('Biosample Age')
# fieldnames.insert(10,'Biosample Age')
fieldnames.append('Derived from')
writer = csv.DictWriter(args.outfile,fieldnames, delimiter='\t')
writer.writeheader()
for file_metadata in reader:
file_accession = file_metadata.get('File accession')
url = urlparse.urljoin(server, 'files/%s' %(file_accession))
file_object = common.encoded_get(url, keypair)
# bio_reps = sorted(list(set(biorep_ns(file_accession, server, keypair))))
# file_metadata['Biological replicate(s)'] = ",".join([str(n) for n in bio_reps])
# bio_ages = sorted(list(set(biorep_ages(file_accession, server, keypair)))) or ""
# file_metadata.update({'Biosample Age': ",".join(bio_ages)})
if file_object.get('derived_from'):
derived_from = ",".join([str(f.split('/')[2]) for f in file_object.get('derived_from')])
else:
derived_from = None
file_metadata.update({'Derived from': derived_from})
#print file_metadata
writer.writerow(file_metadata)
if __name__ == '__main__':
main()
| 38.956204 | 130 | 0.634251 |
| 9e70c643a7c29708773d95a412ee0d3df68dfe94 | 4,133 | py | Python | Shortest_Path/env.py | Chang-Chia-Chi/Maze-Generator-and-Shortest-Path-Finding-Project | fb70447a89e44f66991da35807ba998914df185d | ["MIT"] | 4 | 2020-09-25T02:07:17.000Z | 2022-01-10T04:59:31.000Z | Shortest_Path/env.py | Chang-Chia-Chi/Maze-Generator-and-Shortest-Path-Finding-Project | fb70447a89e44f66991da35807ba998914df185d | ["MIT"] | null | null | null | Shortest_Path/env.py | Chang-Chia-Chi/Maze-Generator-and-Shortest-Path-Finding-Project | fb70447a89e44f66991da35807ba998914df185d | ["MIT"] | 1 | 2021-08-04T12:39:48.000Z | 2021-08-04T12:39:48.000Z |
import pygame
class Node:
"""
create node object to store node coordinate and its parent
state: position standing --> tuple
action: action take to move --> str
parent: parent of node --> Node
"""
def __init__(self, state:tuple, action:str, parent=None):
self.state = state
self.parent = parent
self.action = action
def __eq__(self, other):
if isinstance(other, Node):
return self.state == other.state
else:
return False
def __repr__(self):
if self.parent is None:
fmt = "Node {} and no Parent".format(self.state)
else:
fmt = "Node {} with Parent {}".format(self.state, self.parent.state)
return fmt
def __hash__(self):
return hash(self.state)
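# Illustrative example (added, not part of the original file):
# start = Node(state=(0, 0), action=None)
# step = Node(state=(0, 1), action="RIGHT", parent=start)
# repr(step) -> "Node (0, 1) with Parent (0, 0)"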
class Board:
"""
create board object to present agent states
v_cells: number of vertical cells --> int
h_cells: number of horizontal cells --> int
origin_x: origin x position of screen --> int
origin_y: origin y position of screen --> int
cell_size: size per cell --> int
screen: pygame object "pygame.display.set_mode()" --> pygame
colors: color dictionary --> dict
"""
def __init__(self, v_cells:int, h_cells:int, origin_x:int, origin_y:int,
cell_size:int, screen:int, colors:dict):
self.v_cells = v_cells
self.h_cells = h_cells
self.origin_x = origin_x
self.origin_y = origin_y
self.cell_size = cell_size
self.screen = screen
self.colors = colors
self.wall = set()
self.visited = set()
self.frontiers = set()
self.path = list()
self.start = None
self.target = None
def draw_board(self, return_cells=True)->list:
cells = []
for i in range(self.v_cells):
row = []
for j in range(self.h_cells):
rect = pygame.Rect(self.origin_x + i*self.cell_size,
self.origin_y + j*self.cell_size,
self.cell_size, self.cell_size)
color = self.colors["white"]
if (i, j) == self.start:
color = self.colors["blue"]
elif (i, j) == self.target:
color = self.colors["red"]
elif (i, j) in self.frontiers:
color = self.colors["frontier"]
elif (i, j) in self.wall:
color = self.colors["gray"]
elif (i, j) in self.path:
color = self.colors["p_yellow"]
else:
for node in self.visited:
if (i, j) == node.state:
color = self.colors["green"]
pygame.draw.rect(self.screen, color, rect)
row.append(rect)
cells.append(row)
if return_cells:
return cells
def neighbors(self, state:tuple, wall_included=False)->list:
"""
Return possible action could be taken
state: position of node --> tuple
wall_included: whether walls are included in neighbors
"""
row, col = state
actions = {
"UP": (row-1, col),
"DOWN": (row+1, col),
"LEFT": (row, col-1),
"RIGHT": (row, col+1)
}
res = []
for action, (r, c) in actions.items():
if not wall_included:
if 0 <= r < self.v_cells and 0 <= c < self.h_cells and \
(r, c) not in self.wall:
res.append([action, (r, c)])
else:
if 0 <= r < self.v_cells and 0 <= c < self.h_cells:
res.append([action, (r, c)])
return res if len(res) != 0 else None
def reset(self):
self.wall = set()
self.visited = set()
self.path = list()
self.start = None
self.target = None
def clear_visited(self):
self.visited = set()
self.path = list()
| 32.801587 | 80 | 0.506412 |
| 0b1d1d39468c88c2be9ce8f337231810fb8cb217 | 8,343 | py | Python | tests/unit/api/test_Server.py | samarthdd/cdr-plugin-folder-to-folder | def780f5590f63066194ff7b348fd256d7f74a10 | ["Apache-2.0"] | null | null | null | tests/unit/api/test_Server.py | samarthdd/cdr-plugin-folder-to-folder | def780f5590f63066194ff7b348fd256d7f74a10 | ["Apache-2.0"] | null | null | null | tests/unit/api/test_Server.py | samarthdd/cdr-plugin-folder-to-folder | def780f5590f63066194ff7b348fd256d7f74a10 | ["Apache-2.0"] | null | null | null |
import imp
import inspect
from unittest import TestCase
from unittest.mock import patch, call
from fastapi_offline import FastAPIOffline as FastAPI
from osbot_utils.utils.Misc import list_set
from cdr_plugin_folder_to_folder.api.Server import Server
from cdr_plugin_folder_to_folder.utils.testing.Temp_API_Server import Temp_API_Server
class test_Server(TestCase):
def setUp(self) -> None:
app = FastAPI()
self.server = Server(app=app, reload=False)
#def test_setup(self):
# self.server.add_routes()
@patch("uvicorn.run")
def test_start(self, mock_run):
expected_call = call('cdr_plugin_folder_to_folder.api.Server:app',
host='0.0.0.0',
port=8880,
log_level='info',
reload=False)
self.server.start()
assert mock_run.mock_calls == [expected_call]
def test_start_stop(self):
with Temp_API_Server() as api_server:
assert api_server.server_running() is True
assert api_server.http_GET() == {'status': 'ok'}
assert api_server.server_running() is False
@patch("uvicorn.run")
def test_start__via__main(self, mock_run): # this test confirms that when running the Server directly the uvicorn.run is called
path_file = inspect.getfile(Server) # get path of Server
imp.load_source('__main__', path_file) # force reload and set __main__
assert mock_run.call_count == 1
# lock the current rules mappings to that any new API changes also require an change to this test
def test_routes(self):
# routes before server.add_routes()
assert [route.path for route in self.server.app.routes] == ['/openapi.json', '/static-offline-docs', '/docs', '/docs/oauth2-redirect', '/redoc']
assert self.server.routes() == {}
self.server.add_routes()
assert self.server.routes() == { '/' : { 'methods': {'GET' }, 'name': 'root' , 'path_format': '/' },
'/configuration/config/' : { 'methods': {'GET' }, 'name': 'config' , 'path_format': '/configuration/config/' },
'/configuration/configure_env/' : { 'methods': {'POST'}, 'name': 'configure_environment' , 'path_format': '/configuration/configure_env/' },
'/configuration/configure_gw_sdk_endpoints/' : { 'methods': {'POST'}, 'name': 'configure_multiple_gw_sdk_endpoints' , 'path_format': '/configuration/configure_gw_sdk_endpoints/' },
'/configuration/reload_elastic_file_metadata/' : { 'methods': {'PUT' }, 'name': 'reload_elastic_file_metadata' , 'path_format': '/configuration/reload_elastic_file_metadata/' },
'/configuration/reload_hash_json/' : { 'methods': {'PUT' }, 'name': 'reload_elastic_file_metadata' , 'path_format': '/configuration/reload_hash_json/' },
'/configuration/reload_kibana_dashboards/' : { 'methods': {'PUT' }, 'name': 'reload_elastic_file_metadata' , 'path_format': '/configuration/reload_kibana_dashboards/'},
'/configuration/reset_logging/' : { 'methods': {'PUT' }, 'name': 'reset_logging' , 'path_format': '/configuration/reset_logging/' },
# '/file-distributor/hd1/{num_of_files}' : { 'methods': {'GET' }, 'name': 'get_hd1_files' , 'path_format': '/file-distributor/hd1/{num_of_files}' },
'/file-distributor/hd2/status' : { 'methods': {'GET' }, 'name': 'get_hd2_status_files' , 'path_format': '/file-distributor/hd2/status' },
'/file-distributor/hd2/data' : { 'methods': {'GET'}, 'name': 'get_hd2_data_files' , 'path_format': '/file-distributor/hd2/data' },
'/file-distributor/hd2/processed' : { 'methods': {'GET'}, 'name': 'get_hd2_processed_files' , 'path_format': '/file-distributor/hd2/processed' },
#'/file-distributor/hd3/{num_of_files}' : { 'methods': {'GET' }, 'name': 'get_hd3_files' , 'path_format': '/file-distributor/hd3/{num_of_files}' },
'/health' : { 'methods': {'GET' }, 'name': 'health' , 'path_format': '/health' },
'/pre-processor/clear-data-and-status' : { 'methods': {'POST'}, 'name': 'clear_data_and_status_folders' , 'path_format': '/pre-processor/clear-data-and-status' },
'/pre-processor/pre-process' : { 'methods': {'POST'}, 'name': 'pre_process_hd1_data_to_hd2' , 'path_format': '/pre-processor/pre-process' },
'/pre-processor/pre_process_folder' : { 'methods': {'POST'}, 'name': 'pre_process_a_folder' ,'path_format': '/pre-processor/pre_process_folder' },
'/processing/single_file' : { 'methods': {'POST'}, 'name': 'process_single_file' , 'path_format': '/processing/single_file' },
'/processing/start' : { 'methods': {'POST'}, 'name': 'process_hd2_data_to_hd3' , 'path_format': '/processing/start' },
'/processing/start-sequential' : { 'methods': {'POST'}, 'name': 'process_hd2_data_to_hd3_sequential' , 'path_format': '/processing/start-sequential' },
'/processing/status' : { 'methods': {'GET' }, 'name': 'get_the_processing_status' , 'path_format': '/processing/status' },
'/processing/stop' : { 'methods': {'POST'}, 'name': 'stop_processing' , 'path_format': '/processing/stop' },
'/status' : { 'methods': {'GET' }, 'name': 'status' , 'path_format': '/status' },
'/version' : { 'methods': {'GET' }, 'name': 'version' , 'path_format': '/version' }}
# todo add global exception handler
#def test__exception_in_method
# @app.exception_handler(StarletteHTTPException)
# async def http_exception_handler(request, exc):
# return PlainTextResponse(str(exc.detail), status_code=exc.status_code)
# FastAPI allows multiple rules mappings (which should never happen)
def test__detect_duplicate_routes(self):
self.server.add_routes()
paths_format = [route.path for route in self.server.app.routes]
assert sorted(paths_format) == list_set(paths_format)
| 91.681319 | 242 | 0.461824 |
| b87b976ecadb19967b817edda9f0f98a99837675 | 641 | py | Python | examples/pyplot/latex.py | hadivafaii/vedo | 15f9adbd36d25c0212cbd4eb0c15af54c19f3819 | ["CC0-1.0"] | 836 | 2020-06-14T02:38:12.000Z | 2022-03-31T15:39:50.000Z | examples/pyplot/latex.py | hadivafaii/vedo | 15f9adbd36d25c0212cbd4eb0c15af54c19f3819 | ["CC0-1.0"] | 418 | 2020-06-14T10:51:32.000Z | 2022-03-31T23:23:14.000Z | examples/pyplot/latex.py | hadivafaii/vedo | 15f9adbd36d25c0212cbd4eb0c15af54c19f3819 | ["CC0-1.0"] | 136 | 2020-06-14T02:26:41.000Z | 2022-03-31T12:47:18.000Z |
from vedo import Latex
# https://matplotlib.org/tutorials/text/mathtext.html
latex1 = r'x= \frac{ - b \pm \sqrt {b^2 - 4ac} }{2a}'
latex2 = r'\mathcal{A}\mathrm{sin}(2 \omega t)'
latex3 = r'I(Y | X)=\sum_{x \in \mathcal{X}, y \in \mathcal{Y}} p(x, y) \log \left(\frac{p(x)}{p(x, y)}\right)'
latex4 = r'\Gamma_{\epsilon}(x)=\left[1-e^{-2 \pi \epsilon}\right]^{1-x} \prod_{n=0}^{\infty} \frac{1-\exp (-2 \pi \epsilon(n+1))}{1-\exp (-2 \pi \epsilon(x+n))}'
ltx = Latex(latex4, s=1, c='darkblue', bg='', alpha=0.9, usetex=False)
ltx.crop(0.3, 0.3) # crop top and bottom 30%
ltx.pos(2,0,0)
ltx.show(axes=1, size=(1400,700), zoom=1.8).close()
| 42.733333 | 162 | 0.605304 |
| 5290a2ecfc25f008ac93634363bd4c4f34a7b03e | 1,061 | py | Python | video_classification/models/saving_module.py | gpostelnicu/video_classification | ac8cf0b1a3365ec42ec92fd8b3ad946c6e5c8e15 | ["MIT"] | null | null | null | video_classification/models/saving_module.py | gpostelnicu/video_classification | ac8cf0b1a3365ec42ec92fd8b3ad946c6e5c8e15 | ["MIT"] | null | null | null | video_classification/models/saving_module.py | gpostelnicu/video_classification | ac8cf0b1a3365ec42ec92fd8b3ad946c6e5c8e15 | ["MIT"] | null | null | null |
import torch.nn as nn
CONFIG = 'config'
STATE = 'state'
class SavingModule(nn.Module):
"""
SavingModule provides saving and loading functionality for a module.
In order for a class to inherit from SavingModule, they need to:
- have a class attribute config_cls with the class of the configuration to be used;
- have an instance attribute called config storing this config.
"""
config_cls = None
def __init__(self):
super().__init__()
if self.config_cls is None:
raise ValueError("Children classes need to set config_cls.")
@classmethod
def from_dict(cls, checkpoint: dict):
assert CONFIG in checkpoint
config = cls.config_cls(**checkpoint[CONFIG])
module = cls(config)
if STATE in checkpoint:
module.load_state_dict(checkpoint[STATE])
return module
def to_dict(self, include_state=True):
dic = {CONFIG: dict(self.config._asdict())}
if include_state:
dic[STATE] = self.state_dict()
return dic
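# Minimal subclass sketch (added; names are hypothetical, for illustration only):
# from collections import namedtuple
# LinearConfig = namedtuple('LinearConfig', ['in_dim', 'out_dim'])
# class LinearModule(SavingModule):
#     config_cls = LinearConfig
#     def __init__(self, config):
#         super().__init__()
#         self.config = config
#         self.layer = nn.Linear(config.in_dim, config.out_dim)
# module = LinearModule(LinearConfig(in_dim=8, out_dim=2))
# restored = LinearModule.from_dict(module.to_dict())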
| 27.921053 | 87 | 0.6541 |
| 5b04e7e8bd2eecc98e24d1e6612b34311b96ae0d | 12,077 | py | Python | invokust/aws_lambda/lambda_load_test.py | ktrueda/invokust | ac698589ac993bd52928b119be3c471faf2891c9 | ["MIT"] | null | null | null | invokust/aws_lambda/lambda_load_test.py | ktrueda/invokust | ac698589ac993bd52928b119be3c471faf2891c9 | ["MIT"] | null | null | null | invokust/aws_lambda/lambda_load_test.py | ktrueda/invokust | ac698589ac993bd52928b119be3c471faf2891c9 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import json
import time
import logging
import threading
from boto3.session import Session
from botocore.client import Config
logger = logging.getLogger(__name__)
logging.getLogger("botocore").setLevel(logging.CRITICAL)
session = Session()
config = Config(connect_timeout=10, read_timeout=310)
client = session.client("lambda", config=config)
class LambdaLoadTest(object):
"""
An object to run and collect statistics and results from multiple parallel locust load
tests running on AWS Lambda
"""
def __init__(
self,
lambda_function_name,
threads,
ramp_time,
time_limit,
lambda_payload,
lambda_timeout=300000,
):
self.lock = threading.Lock()
self.start_time = time.time()
self.logger = logging.getLogger()
self.threads = threads
self.ramp_time = ramp_time
self.time_limit = (
time_limit # don't start new threads after {time_limit} seconds
)
self.lambda_function_name = lambda_function_name
self.lambda_payload = lambda_payload
self.lambda_invocation_errors = 0
self.lambda_invocation_count = 0
self.lambda_invocation_error_threshold = 20
self.lambda_total_execution_time = 0
self.requests_fail = 0
self.request_fail_ratio_threshold = 0.5
self.requests_total = 0
self.locust_results = []
self.thread_data = {}
self.print_stats_delay = 3
self.exit_threads = False
self.lambda_timeout = lambda_timeout
def update_thread_data(self, thread_id, key, value):
"""
Receives data from threads and stores in the thread_data dict
"""
with self.lock:
if thread_id not in self.thread_data:
self.thread_data[thread_id] = {}
self.thread_data[thread_id][key] = value
def get_thread_count(self):
"""
Returns number of load test threads running
"""
return len([t for t in threading.enumerate() if t.getName() != "MainThread"])
def get_time_elapsed(self):
"""
Returns elapsed time in seconds since starting the load test
"""
return round(time.time() - self.start_time)
def increase_lambda_invocation_error(self):
"""
Increases Lambda invocation error count
"""
with self.lock:
self.lambda_invocation_errors += 1
def increase_lambda_invocation_count(self):
"""
Increases Lambda invocation count
"""
with self.lock:
self.lambda_invocation_count += 1
def get_invocation_error_ratio(self):
"""
Returns ratio of Lambda invocations to invocation errors
"""
try:
return self.lambda_invocation_errors / float(self.lambda_invocation_count)
except ZeroDivisionError:
return 0
def increase_requests_total(self, requests):
"""
Increases total request count
"""
with self.lock:
self.requests_total += requests
def increase_requests_fail(self, requests):
"""
Increases total request fail count
"""
with self.lock:
self.requests_fail += requests
def get_request_fail_ratio(self):
"""
Returns ratio of failed to total requests
"""
try:
return self.requests_fail / float(self.requests_total)
except ZeroDivisionError:
return 0
def append_locust_results(self, results):
"""
Logs results from a locust execution. All results needs to be aggregated in order to show meaningful statistics of the whole load test
"""
with self.lock:
self.locust_results.append(results)
def get_summary_stats(self):
"""
Returns summary statistics in a dict
"""
return {
"lambda_invocation_count": self.lambda_invocation_count,
"total_lambda_execution_time": self.lambda_total_execution_time,
"requests_total": self.requests_total,
"request_fail_ratio": self.get_request_fail_ratio(),
"invocation_error_ratio": self.get_invocation_error_ratio(),
}
def get_stats(self):
"""
Returns current statistics in a dict
"""
return {
"thread_count": self.get_thread_count(),
"rpm": self.calculate_rpm(),
"time_elapsed": self.get_time_elapsed(),
"requests_total": self.requests_total,
"request_fail_ratio": self.get_request_fail_ratio(),
"invocation_error_ratio": self.get_invocation_error_ratio(),
}
def get_locust_results(self):
"""
Returns a list of locust results
"""
return self.locust_results
def increase_lambda_execution_time(self, time):
"""
Add Lambda execution time to the total
"""
with self.lock:
self.lambda_total_execution_time += time
def calculate_rpm(self):
"""
Returns current total request per minute across all threads
"""
return round(
sum(
self.thread_data[thread_id]["rpm"]
for thread_id in self.thread_data
if "rpm" in self.thread_data[thread_id]
)
)
def check_error_threshold(self):
"""
Checks if the current Lambda and request fail ratios are within thresholds
"""
if self.lambda_invocation_errors > self.lambda_invocation_error_threshold:
self.logger.error(
f"Error limit reached. invocation error count/threshold: "
f"{self.lambda_invocation_errors}/{self.lambda_invocation_error_threshold}"
)
return True
elif self.get_request_fail_ratio() > self.request_fail_ratio_threshold:
self.logger.error(
f"Error limit reached. requests failed ratio/threshold: "
f"{self.get_request_fail_ratio()}/{self.request_fail_ratio_threshold}"
)
return True
else:
return False
def thread_required(self):
"""
Returns True if a new thread should be started when ramping up over time
"""
result = False
if self.get_thread_count() < self.threads:
next_thread_interval = (
self.ramp_time / self.threads
) * self.get_thread_count()
if self.get_time_elapsed() > next_thread_interval:
result = True
return result
def stop_threads(self):
"""
Sets a boolean to stop threads
"""
with self.lock:
self.exit_threads = True
def start_new_thread(self):
"""
Creates a new load test thread
"""
t_name = "thread_{0}".format(threading.activeCount())
t = threading.Thread(name=t_name, target=self.thread)
t.daemon = True
t.start()
def thread(self):
"""
This method is a single thread and performs the actual execution of the Lambda function and logs the statistics/results
"""
self.logger.info("thread started")
thread_start_time = time.time()
thread_id = threading.current_thread().getName()
self.update_thread_data(thread_id, "start_time", thread_start_time)
while True:
thread_run_time = time.time() - thread_start_time
if self.exit_threads:
break
if self.ramp_time in [0.0, 0]:
sleep_time = 0
else:
sleep_time = round(max(0, self.ramp_time - thread_run_time) / 30)
function_start_time = time.time()
try:
self.logger.info("Invoking lambda...")
response = client.invoke(
FunctionName=self.lambda_function_name,
Payload=json.dumps(self.lambda_payload),
)
except Exception as e:
self.logger.critical("Lambda invocation failed: {0}".format(repr(e)))
time.sleep(2)
continue
function_end_time = time.time()
self.increase_lambda_invocation_count()
if "FunctionError" in response:
logger.error(
"error {0}: {1}".format(
response["FunctionError"], response["Payload"].read()
)
)
self.increase_lambda_invocation_error()
time.sleep(2)
continue
payload = response["Payload"].read()
payload_json_str = json.loads(payload.decode("utf-8"))
if not payload_json_str:
logger.error("No results in payload")
self.increase_lambda_invocation_error()
time.sleep(2)
continue
results = json.loads(payload_json_str)
function_duration = function_end_time - function_start_time
total_rpm = results["num_requests"] / (function_duration / 60)
lambda_execution_time = self.lambda_timeout - results["remaining_time"]
self.append_locust_results(results)
self.increase_requests_fail(results["num_requests_fail"])
self.increase_requests_total(results["num_requests"])
self.update_thread_data(thread_id, "rpm", total_rpm)
self.update_thread_data(
thread_id, "lambda_execution_time", lambda_execution_time
)
self.increase_lambda_execution_time(lambda_execution_time)
logger.info(
"Lambda invocation complete. Requests (errors): {0} ({1}), execution time: {2}ms, sleeping: {3}s".format(
results["num_requests"],
results["num_requests_fail"],
lambda_execution_time,
sleep_time,
)
)
time.sleep(sleep_time)
self.logger.info("thread finished")
def run(self):
"""
Starts the load test, periodically prints statistics and starts new threads
"""
self.logger.info(
"\nStarting load test..."
f"\nFunction name: {self.lambda_function_name}"
f"\nRamp time: {self.ramp_time}s"
f"\nThreads: {self.threads}"
f"\nLambda payload: {self.lambda_payload}"
f"\nStart ramping down after: {self.time_limit}s"
)
self.start_new_thread()
while True:
self.logger.info(
"threads: {thread_count}, rpm: {rpm}, time elapsed: {time_elapsed}s, total requests from finished threads: {requests_total}, "
"request fail ratio: {request_fail_ratio}, invocation error ratio: {invocation_error_ratio}".format(
**self.get_stats()
)
)
if self.thread_required():
self.start_new_thread()
if self.check_error_threshold():
self.stop_threads()
self.logger.info("Waiting for threads to exit...")
while self.get_thread_count() > 0:
time.sleep(1)
else:
break
if self.time_limit and self.get_time_elapsed() > self.time_limit:
self.logger.info("Time limit reached. Starting ramp down...")
self.stop_threads()
self.logger.info(
"Waiting for all Lambdas to return. This may take up to {0}.".format(
self.lambda_payload["run_time"]
)
)
while self.get_thread_count() > 0:
time.sleep(1)
else:
break
time.sleep(self.print_stats_delay)
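# Hypothetical usage sketch (added; the function name and payload values are assumptions,
# but the payload/result keys mirror what this class reads: 'run_time', 'num_requests',
# 'num_requests_fail', 'remaining_time'):
# load_test = LambdaLoadTest(
#     lambda_function_name='locust-worker',
#     threads=4,
#     ramp_time=60,
#     time_limit=300,
#     lambda_payload={'run_time': '3m'},
# )
# load_test.run()
# print(load_test.get_summary_stats())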
| 33.547222 | 142 | 0.578703 |
| 68142852ae778009091ecacf5e33effcede75c1c | 421 | py | Python | py/tests/test_Sample.py | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | ["MIT"] | 1 | 2022-01-26T16:33:45.000Z | 2022-01-26T16:33:45.000Z | py/tests/test_Sample.py | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | ["MIT"] | null | null | null | py/tests/test_Sample.py | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | ["MIT"] | 1 | 2022-01-26T16:35:44.000Z | 2022-01-26T16:35:44.000Z |
import unittest
from solns.solnSample import *
class TestCase(unittest.TestCase):
def test_classmethod(self):
s = Solution.fromOneNumber(1)
self.assertEqual(s.soln1(),2)
def test_abstractmethod(self):
s = Solution(1,2)
self.assertEqual(s.soln1(),3)
def test_staticSoln2(self):
self.assertEqual(Solution.soln2(1,2),3)
if __name__ == '__main__':
unittest.main()
| 22.157895 | 47 | 0.662708 |
| c9c91ceb39de42c44f9ce81658aa79b896999552 | 2,694 | py | Python | python/paddle/fluid/tests/unittests/test_nn_functional_embedding_static.py | zhupengyang/Paddle | ad6e3dd69cd915dd61287e96de7ec4ae132d24a5 | ["Apache-2.0"] | 1 | 2021-12-27T02:41:23.000Z | 2021-12-27T02:41:23.000Z | python/paddle/fluid/tests/unittests/test_nn_functional_embedding_static.py | zlsh80826/Paddle | c560a7d57aad990f374ebadd330351f18e2ca65f | ["Apache-2.0"] | null | null | null | python/paddle/fluid/tests/unittests/test_nn_functional_embedding_static.py | zlsh80826/Paddle | c560a7d57aad990f374ebadd330351f18e2ca65f | ["Apache-2.0"] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
import paddle.nn.functional as functional
class EmbeddingStatic(unittest.TestCase):
def test_1(self):
prog = fluid.Program()
with fluid.program_guard(prog):
def test_bad_x():
initializer = fluid.initializer.NumpyArrayInitializer(
np.random.random(size=(128, 100)))
param_attr = fluid.ParamAttr(
name="emb_weight",
learning_rate=0.5,
initializer=initializer,
trainable=True)
weight = prog.global_block().create_parameter(
(128, 100), attr=param_attr, dtype="float32")
label = fluid.layers.data(
name="label",
shape=[4],
append_batch_size=False,
dtype="int64")
emb = functional.embedding(
x=label, weight=weight, sparse=True, name="embedding")
test_bad_x()
def test_2(self):
prog = fluid.Program()
with fluid.program_guard(prog):
def test_bad_x():
initializer = fluid.initializer.NumpyArrayInitializer(
np.random.random(size=(128, 100)))
param_attr = fluid.ParamAttr(
name="emb_weight",
learning_rate=0.5,
initializer=initializer,
trainable=True)
weight = prog.global_block().create_parameter(
(128, 100), attr=param_attr, dtype="float32")
label = fluid.layers.data(
name="label",
shape=[4],
append_batch_size=False,
dtype="int32")
emb = functional.embedding(
x=label, weight=weight, sparse=True, name="embedding")
test_bad_x()
if __name__ == '__main__':
unittest.main()
| 32.457831 | 74 | 0.5683 |
| 7e58f820f09f7605b685dc26f6fa5168271512cf | 2,433 | py | Python | log_casp_act/run_model_579.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | ["MIT"] | null | null | null | log_casp_act/run_model_579.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | ["MIT"] | null | null | null | log_casp_act/run_model_579.py | LoLab-VU/Bayesian_Inference_of_Network_Dynamics | 54a5ef7e868be34289836bbbb024a2963c0c9c86 | ["MIT"] | null | null | null |
import numpy as np
from math import *
import pymultinest
import sys
sys.path.insert(0, '/home/kochenma/pysb')
from pysb.integrate import Solver
import csv
import datetime
import time as tm
from model_579 import model
from pysb.pathfinder import set_path
set_path('bng', '/home/kochenma/BioNetGen')
data_object = []
with open('earm_data.csv') as data_file:
reader = csv.reader(data_file)
line = list(reader)
for each in line:
data_object.append(each)
for i, each in enumerate(data_object):
if i > 0:
for j, item in enumerate(each):
data_object[i][j] = float(data_object[i][j])
data_object = data_object[1:]
time = []
for each in data_object:
time.append(float(each[0]))
model_solver = Solver(model, time, integrator='vode', integrator_options={'atol': 1e-12, 'rtol': 1e-12})
def prior(cube, ndim, nparams):
for k, every in enumerate(model.parameters):
if every.name[-3:] == '1kf':
cube[k] = cube[k]*4 - 4
if every.name[-3:] == '2kf':
cube[k] = cube[k]*4 - 8
if every.name[-3:] == '1kr':
cube[k] = cube[k]*4 - 4
if every.name[-3:] == '1kc':
cube[k] = cube[k]*4 - 1
postfixes = ['1kf', '2kf', '1kr', '1kc']
def loglike(cube, ndim, nparams):
point = []
cube_index = 0
for k, every in enumerate(model.parameters):
if every.name[-3:] in postfixes:
point.append(10**cube[cube_index])
cube_index += 1
else:
point.append(model.parameters[k].value)
model_solver.run(point)
failed = False
for every in model_solver.yobs:
for thing in every:
if thing <= -0.00000001 or np.isnan(thing):
failed = True
if failed:
return ['fail', -10000.0]
else:
parpc = model_solver.yobs[-1][6]/(model_solver.yobs[-1][1] + model_solver.yobs[-1][6])
if (parpc > 0.0) and (parpc < 1.00000001):
print log(parpc), point
return ['sim', log(parpc)]
else:
return ['fail', -10000.0]
n_params = 0
for m, lotsa in enumerate(model.parameters):
if lotsa.name[-3:] == '1kf':
n_params += 1
if lotsa.name[-3:] == '2kf':
n_params += 1
if lotsa.name[-3:] == '1kr':
n_params += 1
if lotsa.name[-3:] == '1kc':
n_params += 1
start_time = tm.clock()
counts = [0, 0]
pymultinest.run(loglike, prior, n_params, evidence_tolerance=0.0001, n_live_points=16000, log_zero=-1e3, sampling_efficiency=0.3, outputfiles_basename='/scratch/kochenma/log_casp_act/579/', resume = False, verbose = False, counts=counts)
print counts
print 'start time', start_time
print 'end time', tm.clock()
| 25.610526 | 237 | 0.671599 |
| a47f34da66634483663c102d2f6ac0337c109f62 | 42 | py | Python | modules/__init__.py | autonomousvision/data_aggregation | 76777156a465cbb77d6d5ab88da8f1812e7ff043 | ["MIT"] | 29 | 2020-03-19T14:11:15.000Z | 2022-02-01T14:51:40.000Z | modules/__init__.py | autonomousvision/data_aggregation | 76777156a465cbb77d6d5ab88da8f1812e7ff043 | ["MIT"] | 4 | 2020-06-24T18:49:27.000Z | 2020-11-18T12:31:26.000Z | modules/__init__.py | autonomousvision/data_aggregation | 76777156a465cbb77d6d5ab88da8f1812e7ff043 | ["MIT"] | 5 | 2020-06-24T02:00:13.000Z | 2021-06-05T08:54:34.000Z |
from .screen_manager import ScreenManager
| 21 | 41 | 0.880952 |
| fd661be1d36583e36f04f291cf696e0fc5fec77c | 271 | py | Python | util/make_ascii.py | kaisatec/duktape | d3dfd199897876dc40d93d9a6da4d6204719e9b1 | ["MIT"] | null | null | null | util/make_ascii.py | kaisatec/duktape | d3dfd199897876dc40d93d9a6da4d6204719e9b1 | ["MIT"] | null | null | null | util/make_ascii.py | kaisatec/duktape | d3dfd199897876dc40d93d9a6da4d6204719e9b1 | ["MIT"] | null | null | null |
#!/usr/bin/python
#
# Paranoia escape input file to be printable ASCII.
#
import os, sys
inp = sys.stdin.read().decode('utf-8')
for c in inp:
if (ord(c) >= 0x20 and ord(c) <= 0x7e) or (c in '\x0a'):
sys.stdout.write(c)
else:
sys.stdout.write('\\u%04x' % ord(c))
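# Example invocation (added; file names are illustrative):
#   python util/make_ascii.py < input.js > output_ascii.js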
| 19.357143 | 57 | 0.619926 |
| c9d2dd1bd31ba0462b0860ea6d7811b96f312028 | 2,802 | py | Python | cafe/plugins/subunit/cafe/common/reporting/subunit_report.py | melissa-kam/opencafe | af90c228084d479afa60b8b06a6b5d4d1adf2b8e | ["Apache-2.0"] | null | null | null | cafe/plugins/subunit/cafe/common/reporting/subunit_report.py | melissa-kam/opencafe | af90c228084d479afa60b8b06a6b5d4d1adf2b8e | ["Apache-2.0"] | null | null | null | cafe/plugins/subunit/cafe/common/reporting/subunit_report.py | melissa-kam/opencafe | af90c228084d479afa60b8b06a6b5d4d1adf2b8e | ["Apache-2.0"] | null | null | null |
# Copyright 2015 Rackspace
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import os
import sys
import uuid
import pytz
import subunit
from cafe.common.reporting.base_report import BaseReport
class SubunitReport(BaseReport):
def generate_report(self, result_parser, all_results=None, path=None):
""" Generates a Subunit report in the specified directory. """
result_path = path or os.getcwd()
if os.path.isdir(result_path):
result_path += "/subunit_results"
with open(result_path, 'wb') as result_file:
output = subunit.v2.StreamResultToBytes(result_file)
output.startTestRun()
# Convert Result objects to dicts for processing
for result in all_results:
test_result = result.__dict__
if test_result.get('failure_trace') is not None:
test_result['result'] = "fail"
elif test_result.get('skipped_msg') is not None:
test_result['result'] = "skip"
elif test_result.get('error_trace') is not None:
test_result['result'] = "fail"
else:
test_result['result'] = "success"
if test_result['test_method_name'] == "setUpClass":
# This case is to match the tempest format
test_id = "{0} ({1})".format(
test_result['test_method_name'],
test_result['test_class_name'])
else:
test_id = "{0}.{1}".format(
test_result['test_class_name'],
test_result['test_method_name'])
kwargs = {
"timestamp": datetime.now(pytz.UTC),
"test_id": unicode(test_id)}
output.status(**kwargs)
kwargs["test_status"] = test_result['result']
kwargs["file_bytes"] = bytes(test_result.get(
'failure_trace') or test_result.get('error_trace') or "0")
kwargs["file_name"] = "stdout"
kwargs["mime_type"] = unicode("text/plain;charset=utf8")
output.status(**kwargs)
output.stopTestRun()
| 40.608696 | 78 | 0.590293 |
| 0848ca688df91d21e19bc633f06387fb9fc973c2 | 2,117 | py | Python | business_rules_bulk/actions.py | awaazde/business-rules | 7e5c998db4b06268430e19c65bb61883247e9225 | ["MIT"] | null | null | null | business_rules_bulk/actions.py | awaazde/business-rules | 7e5c998db4b06268430e19c65bb61883247e9225 | ["MIT"] | null | null | null | business_rules_bulk/actions.py | awaazde/business-rules | 7e5c998db4b06268430e19c65bb61883247e9225 | ["MIT"] | 1 | 2020-04-29T06:49:50.000Z | 2020-04-29T06:49:50.000Z |
import inspect
from . import fields
from .utils import fn_name_to_pretty_label
class BaseActions:
""" Classes that hold a collection of actions to use with the rules
engine should inherit from this.
"""
@classmethod
def get_all_actions(cls):
methods = inspect.getmembers(cls)
return [{'name': m[0],
'label': m[1].label,
'params': m[1].params
} for m in methods if getattr(m[1], 'is_rule_action', False)]
def _validate_action_parameters(func, params):
""" Verifies that the parameters specified are actual parameters for the
function `func`, and that the field types are FIELD_* types in fields.
"""
if params is not None:
# Verify field name is valid
valid_fields = [getattr(fields, f) for f in dir(fields) \
if f.startswith("FIELD_")]
for param in params:
param_name, field_type = param['name'], param['fieldType']
if param_name not in func.__code__.co_varnames:
raise AssertionError("Unknown parameter name {} specified for"\
" action {}".format(
param_name, func.__name__))
if field_type not in valid_fields:
raise AssertionError("Unknown field type {} specified for"\
" action {} param {}".format(
field_type, func.__name__, param_name))
def rule_action(label=None, params=None):
""" Decorator to make a function into a rule action
"""
def wrapper(func):
params_ = params
if isinstance(params, dict):
params_ = [dict(label=fn_name_to_pretty_label(name),
name=name,
fieldType=field_type) \
for name, field_type in list(params.items())]
_validate_action_parameters(func, params_)
func.is_rule_action = True
func.label = label \
or fn_name_to_pretty_label(func.__name__)
func.params = params_
return func
return wrapper
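# Hypothetical usage sketch (added; assumes the local `fields` module defines FIELD_TEXT,
# as in upstream business-rules; the action class and names are made up):
# class MyActions(BaseActions):
#     @rule_action(params={"message": fields.FIELD_TEXT})
#     def log_message(self, message):
#         print(message)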
| 37.803571 | 79 | 0.585262 |
| 2bf159de67e8ade52e3285473fd2edb10b3f5e6e | 2,667 | py | Python | experiments/tests/test_hPotts.py | wmkouw/cc-infopriors | 653079f201c8bce570dacb3479f4270ebe0de953 | ["MIT"] | 1 | 2019-07-11T01:32:55.000Z | 2019-07-11T01:32:55.000Z | experiments/tests/test_hPotts.py | wmkouw/cc-smoothprior | 653079f201c8bce570dacb3479f4270ebe0de953 | ["MIT"] | null | null | null | experiments/tests/test_hPotts.py | wmkouw/cc-smoothprior | 653079f201c8bce570dacb3479f4270ebe0de953 | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Script to reproduce results from McGrory, Titterington, Reeves & Pettitt.
Author: W.M.Kouw
Date: 18-09-2018
"""
import pandas as pd
import numpy as np
import numpy.random as rnd
import scipy.optimize as opt
import scipy.stats as st
import matplotlib.pyplot as plt
from hPottsMRF import hiddenPotts
from util import generate_Potts
from vis import plot_segmentations
'''Experimental parameters'''
# Visualize predictions
vis = True
# Number of repetitions
nR = 1
# Number of classes
nK = 3
# Gaussian noise parameters
mu = [0, 0, 0]
si2 = [0.1, 0.1, 0.1]
# Smoothing parameter
beta = 2.0
# Shape of image
shape = (20, 20)
'''Repeat experiment'''
beta_hat = np.zeros((nR, nK))
em_hat = np.zeros((nR, nK))
la_hat = np.zeros((nR, nK))
ga_hat = np.zeros((nR, nK))
ks_hat = np.zeros((nR, nK))
for r in np.arange(nR):
# Report progress
print('At repetition ' + str(r) + '/' + str(nR))
# Generate image according to set parameters
Y, energy = generate_Potts(shape=shape, ncolors=nK, beta=beta)
# Add independent Gaussian noise
X = np.copy(Y).astype('float64')
for k in range(nK):
X[Y == k] += rnd.normal(mu[k], np.sqrt(si2[k]), np.sum(Y == k))
# Initialize model
model = hiddenPotts(num_classes=nK, tissue_specific=True)
# Map label image to one-hot
Y1 = model.one_hot(Y)
# Estimate smoothing parameter
beta_hat[r, :] = model.maximum_likelihood_beta(Y1, max_iter=10)
# Segment image
Y_hat, nu, theta = model.segment(X, beta=beta_hat[r, :], num_iter=10)
# Store estimated parameters
em_hat[r, :], la_hat[r, :], ga_hat[r, :], ks_hat[r, :] = theta
# Plot images, plus error image
if vis:
plot_segmentations(Y, X, Y_hat, show=True)
# Report results
print('Mean estimated beta = ' + str(np.mean(beta_hat, axis=0)))
'''Posteriors for hyperparameters.'''
em_h = np.mean(em_hat, axis=0)
la_h = np.mean(la_hat, axis=0)
ga_h = np.mean(ga_hat, axis=0)
ks_h = np.mean(ks_hat, axis=0)
# All classes are deemed independent
for k in range(nK):
# Check if mode can be computed
if la_h[k] >= 1:
# Modal posterior precision
tau_hat_k = (la_h[k]/2 - 1) / (ks_h[k]/2)
else:
# Expected posterior precision
tau_hat_k = la_h[k] / ks_h[k]
# Compute estimated sigma
si2_h_k = 1./tau_hat_k
# Expected posterior mean
mu_h_k = em_h[k]
# Check whether posterior distributions center around noise distributions
print("mu_h_" + str(k) + " = " + str(mu_h_k) + " (" + str(mu[k] + k) + ")")
print("si_h_" + str(k) + " = " + str(si2_h_k) + " (" + str(si2[k]) + ")")
| 23.60177 | 79 | 0.64342 |
| 462b4941431f5ee2114ba674ca233d345a5438c3 | 9,354 | py | Python | lib/model/rpn/proposal_target_layer_faster_rcnn.py | Complicateddd/CascadeRCNN | 019010e80411325dbde62f4d649e5a2ead8eabac | ["MIT"] | 53 | 2020-03-19T02:29:58.000Z | 2022-03-25T08:47:32.000Z | lib/model/rpn/proposal_target_layer_faster_rcnn.py | Complicateddd/CascadeRCNN | 019010e80411325dbde62f4d649e5a2ead8eabac | ["MIT"] | 5 | 2020-05-18T10:26:35.000Z | 2021-05-18T08:36:54.000Z | lib/model/rpn/proposal_target_layer_faster_rcnn.py | Complicateddd/CascadeRCNN | 019010e80411325dbde62f4d649e5a2ead8eabac | ["MIT"] | 6 | 2020-04-20T13:16:19.000Z | 2020-12-08T14:47:36.000Z |
from __future__ import absolute_import
# --------------------------------------------------------
# Faster R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick and Sean Bell
# --------------------------------------------------------
# --------------------------------------------------------
# Reorganized and modified by Jianwei Yang and Jiasen Lu
# --------------------------------------------------------
import torch
import torch.nn as nn
import numpy as np
import numpy.random as npr
from ..utils.config import cfg
from .bbox_transform import bbox_overlaps_batch, bbox_transform_batch
import pdb
class _ProposalTargetLayer(nn.Module):
"""
Assign object detection proposals to ground-truth targets. Produces proposal
classification labels and bounding-box regression targets.
"""
def __init__(self, nclasses):
super(_ProposalTargetLayer, self).__init__()
self._num_classes = nclasses
self.BBOX_NORMALIZE_MEANS = torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_MEANS)
self.BBOX_NORMALIZE_STDS = torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_STDS)
self.BBOX_INSIDE_WEIGHTS = torch.FloatTensor(cfg.TRAIN.BBOX_INSIDE_WEIGHTS)
def forward(self, all_rois, gt_boxes, num_boxes):
self.BBOX_NORMALIZE_MEANS = self.BBOX_NORMALIZE_MEANS.type_as(gt_boxes)
self.BBOX_NORMALIZE_STDS = self.BBOX_NORMALIZE_STDS.type_as(gt_boxes)
self.BBOX_INSIDE_WEIGHTS = self.BBOX_INSIDE_WEIGHTS.type_as(gt_boxes)
gt_boxes_append = gt_boxes.new(gt_boxes.size()).zero_()
gt_boxes_append[:,:,1:5] = gt_boxes[:,:,:4]
# Include ground-truth boxes in the set of candidate rois
all_rois = torch.cat([all_rois, gt_boxes_append], 1)
num_images = 1
rois_per_image = int(cfg.TRAIN.BATCH_SIZE / num_images)
fg_rois_per_image = int(np.round(cfg.TRAIN.FG_FRACTION * rois_per_image))
fg_rois_per_image = 1 if fg_rois_per_image == 0 else fg_rois_per_image
labels, rois, bbox_targets, bbox_inside_weights = self._sample_rois_pytorch(
all_rois, gt_boxes, fg_rois_per_image,
rois_per_image, self._num_classes)
bbox_outside_weights = (bbox_inside_weights > 0).float()
return rois, labels, bbox_targets, bbox_inside_weights, bbox_outside_weights
def backward(self, top, propagate_down, bottom):
"""This layer does not propagate gradients."""
pass
def reshape(self, bottom, top):
"""Reshaping happens during the call to forward."""
pass
def _get_bbox_regression_labels_pytorch(self, bbox_target_data, labels_batch, num_classes):
"""Bounding-box regression targets (bbox_target_data) are stored in a
compact form b x N x (class, tx, ty, tw, th)
This function expands those targets into the 4-of-4*K representation used
by the network (i.e. only one class has non-zero targets).
Returns:
bbox_target (ndarray): b x N x 4K blob of regression targets
bbox_inside_weights (ndarray): b x N x 4K blob of loss weights
"""
batch_size = labels_batch.size(0)
rois_per_image = labels_batch.size(1)
clss = labels_batch
bbox_targets = bbox_target_data.new(batch_size, rois_per_image, 4).zero_()
bbox_inside_weights = bbox_target_data.new(bbox_targets.size()).zero_()
for b in range(batch_size):
# assert clss[b].sum() > 0
if clss[b].sum() == 0:
continue
inds = torch.nonzero(clss[b] > 0).view(-1)
for i in range(inds.numel()):
ind = inds[i]
bbox_targets[b, ind, :] = bbox_target_data[b, ind, :]
bbox_inside_weights[b, ind, :] = self.BBOX_INSIDE_WEIGHTS
return bbox_targets, bbox_inside_weights
def _compute_targets_pytorch(self, ex_rois, gt_rois):
"""Compute bounding-box regression targets for an image."""
assert ex_rois.size(1) == gt_rois.size(1)
assert ex_rois.size(2) == 4
assert gt_rois.size(2) == 4
batch_size = ex_rois.size(0)
rois_per_image = ex_rois.size(1)
targets = bbox_transform_batch(ex_rois, gt_rois)
if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
# Optionally normalize targets by a precomputed mean and stdev
targets = ((targets - self.BBOX_NORMALIZE_MEANS.expand_as(targets))
/ self.BBOX_NORMALIZE_STDS.expand_as(targets))
return targets
def _sample_rois_pytorch(self, all_rois, gt_boxes, fg_rois_per_image, rois_per_image, num_classes):
"""Generate a random sample of RoIs comprising foreground and background
examples.
"""
# overlaps: (rois x gt_boxes)
overlaps = bbox_overlaps_batch(all_rois, gt_boxes)
max_overlaps, gt_assignment = torch.max(overlaps, 2)
batch_size = overlaps.size(0)
num_proposal = overlaps.size(1)
num_boxes_per_img = overlaps.size(2)
offset = torch.arange(0, batch_size)*gt_boxes.size(1)
offset = offset.view(-1, 1).type_as(gt_assignment) + gt_assignment
# Indexing changed for PyTorch 1.0 compatibility
labels = gt_boxes[:,:,4].contiguous().view(-1)[(offset.view(-1),)].view(batch_size, -1)
labels_batch = labels.new(batch_size, rois_per_image).zero_()
rois_batch = all_rois.new(batch_size, rois_per_image, 5).zero_()
gt_rois_batch = all_rois.new(batch_size, rois_per_image, 5).zero_()
# Guard against the case when an image has fewer than max_fg_rois_per_image
# foreground RoIs
for i in range(batch_size):
fg_inds = torch.nonzero(max_overlaps[i] >= cfg.TRAIN.FG_THRESH).view(-1)
fg_num_rois = fg_inds.numel()
# Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI)
bg_inds = torch.nonzero((max_overlaps[i] < cfg.TRAIN.BG_THRESH_HI) &
(max_overlaps[i] >= cfg.TRAIN.BG_THRESH_LO)).view(-1)
bg_num_rois = bg_inds.numel()
if fg_num_rois > 0 and bg_num_rois > 0:
# sampling fg
fg_rois_per_this_image = min(fg_rois_per_image, fg_num_rois)
# torch.randperm seems to have a bug in multi-GPU settings that causes a segfault.
# See https://github.com/pytorch/pytorch/issues/1868 for more details.
# Use numpy instead.
#rand_num = torch.randperm(fg_num_rois).long().cuda()
rand_num = torch.from_numpy(np.random.permutation(fg_num_rois)).type_as(gt_boxes).long()
fg_inds = fg_inds[rand_num[:fg_rois_per_this_image]]
# sampling bg
bg_rois_per_this_image = rois_per_image - fg_rois_per_this_image
# torch.rand seems to have a bug where it can generate very large numbers and cause an error.
# We use numpy's rand instead.
#rand_num = (torch.rand(bg_rois_per_this_image) * bg_num_rois).long().cuda()
rand_num = np.floor(np.random.rand(bg_rois_per_this_image) * bg_num_rois)
rand_num = torch.from_numpy(rand_num).type_as(gt_boxes).long()
bg_inds = bg_inds[rand_num]
elif fg_num_rois > 0 and bg_num_rois == 0:
# sampling fg
#rand_num = torch.floor(torch.rand(rois_per_image) * fg_num_rois).long().cuda()
rand_num = np.floor(np.random.rand(rois_per_image) * fg_num_rois)
rand_num = torch.from_numpy(rand_num).type_as(gt_boxes).long()
fg_inds = fg_inds[rand_num]
fg_rois_per_this_image = rois_per_image
bg_rois_per_this_image = 0
elif bg_num_rois > 0 and fg_num_rois == 0:
# sampling bg
#rand_num = torch.floor(torch.rand(rois_per_image) * bg_num_rois).long().cuda()
rand_num = np.floor(np.random.rand(rois_per_image) * bg_num_rois)
rand_num = torch.from_numpy(rand_num).type_as(gt_boxes).long()
bg_inds = bg_inds[rand_num]
bg_rois_per_this_image = rois_per_image
fg_rois_per_this_image = 0
else:
raise ValueError("bg_num_rois = 0 and fg_num_rois = 0, this should not happen!")
# The indices that we're selecting (both fg and bg)
keep_inds = torch.cat([fg_inds, bg_inds], 0)
# Select sampled values from various arrays:
labels_batch[i].copy_(labels[i][keep_inds])
# Clamp labels for the background RoIs to 0
if fg_rois_per_this_image < rois_per_image:
labels_batch[i][fg_rois_per_this_image:] = 0
rois_batch[i] = all_rois[i][keep_inds]
rois_batch[i,:,0] = i
gt_rois_batch[i] = gt_boxes[i][gt_assignment[i][keep_inds]]
bbox_target_data = self._compute_targets_pytorch(
rois_batch[:,:,1:5], gt_rois_batch[:,:,:4])
bbox_targets, bbox_inside_weights = \
self._get_bbox_regression_labels_pytorch(bbox_target_data, labels_batch, num_classes)
return labels_batch, rois_batch, bbox_targets, bbox_inside_weights
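# --- Hedged, self-contained sketch (not from the original file) of the flattened
# batch-indexing trick used in _sample_rois_pytorch above: per-proposal labels are
# gathered from gt_boxes[:, :, 4] by adding batch_index * K to gt_assignment and
# indexing the flattened label tensor. All sizes and values below are hypothetical.
import torch

_batch_size, _K = 2, 4
_gt_labels = torch.arange(_batch_size * _K, dtype=torch.float).view(_batch_size, _K)  # fake class labels
_gt_assignment = torch.tensor([[0, 2, 1],   # best-matching gt index per proposal
                               [3, 0, 0]])
_offset = torch.arange(0, _batch_size).view(-1, 1) * _K + _gt_assignment
_labels = _gt_labels.contiguous().view(-1)[(_offset.view(-1),)].view(_batch_size, -1)
# _labels[i] == _gt_labels[i, _gt_assignment[i]]  -> tensor([[0., 2., 1.], [7., 4., 4.]])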
| 43.915493
| 104
| 0.631922
|
43cad1520ca64e6de9d0ba38863611aa6383cc35
| 36
|
py
|
Python
|
homeassistant/components/openweathermap/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 23
|
2017-11-15T21:03:53.000Z
|
2021-03-29T21:33:48.000Z
|
homeassistant/components/openweathermap/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47
|
2020-07-23T07:14:33.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/openweathermap/__init__.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 10
|
2018-01-01T00:12:51.000Z
|
2021-12-21T23:08:05.000Z
|
"""The openweathermap component."""
| 18
| 35
| 0.722222
|
0e30a758c17b3bbac5ecf1dc23678df222e982e2
| 1,186
|
py
|
Python
|
opennem/utils/images.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 22
|
2020-06-30T05:27:21.000Z
|
2022-02-21T12:13:51.000Z
|
opennem/utils/images.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 71
|
2020-08-07T13:06:30.000Z
|
2022-03-15T06:44:49.000Z
|
opennem/utils/images.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 13
|
2020-06-30T03:28:32.000Z
|
2021-12-30T08:17:16.000Z
|
from hashlib import md5
from io import BytesIO
from PIL import Image
def img_to_buffer(img: Image) -> memoryview:
"""
Convert an image to JPEG and return it as a memory buffer
@TODO jpeg settings in opennem.settings
"""
buf = BytesIO()
# convert all to RGB
if img.mode in ("RGBA", "P", "LA"):
img = img.convert("RGB")
img.save(buf, format="JPEG")
return buf.getbuffer()
def image_get_hash(img: Image) -> str:
"""Image hash based on content"""
img = img.resize((10, 10), Image.ANTIALIAS)
img = img.convert("L")
pixel_data = list(img.getdata())
avg_pixel = sum(pixel_data) / len(pixel_data)
bits = "".join(["1" if (px >= avg_pixel) else "0" for px in pixel_data])
hex_representation = str(hex(int(bits, 2)))[2:][::-1].upper()
return hex_representation.lower()
def image_get_crypto_hash(img: Image, save_driver: str = "JPEG") -> str:
"""Get a cryptographic hash of an image"""
img_hash = md5()
# convert to RGB
img_rgb = img.convert("RGB")
with BytesIO() as memobj:
img_rgb.save(memobj, save_driver)
data = memobj.getvalue()
img_hash.update(data)
return img_hash.hexdigest()
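# Hedged usage sketch (not part of the original module): exercises the helpers above
# on a synthetic image. Assumes Pillow is installed and that img_to_buffer,
# image_get_hash and image_get_crypto_hash defined above are in scope.
if __name__ == "__main__":
    sample = Image.new("RGBA", (64, 64), color=(200, 30, 30, 255))   # tiny red test image
    jpeg_bytes = img_to_buffer(sample)                               # memoryview of JPEG data
    print(len(jpeg_bytes), "bytes of JPEG")
    print("average hash :", image_get_hash(sample))                  # perceptual, content-based hash
    print("crypto hash  :", image_get_crypto_hash(sample))           # md5 of the RGB JPEG bytes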
| 23.254902
| 76
| 0.627319
|
57cf2ef1d70c3c57ab67473f52582f02f95a5988
| 1,794
|
py
|
Python
|
alipay/aop/api/response/AlipayCommerceOperationContentApplyResponse.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 213
|
2018-08-27T16:49:32.000Z
|
2021-12-29T04:34:12.000Z
|
alipay/aop/api/response/AlipayCommerceOperationContentApplyResponse.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 29
|
2018-09-29T06:43:00.000Z
|
2021-09-02T03:27:32.000Z
|
alipay/aop/api/response/AlipayCommerceOperationContentApplyResponse.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 59
|
2018-08-27T16:59:26.000Z
|
2022-03-25T10:08:15.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.OperationExtDataModel import OperationExtDataModel
class AlipayCommerceOperationContentApplyResponse(AlipayResponse):
def __init__(self):
super(AlipayCommerceOperationContentApplyResponse, self).__init__()
self._ext_data = None
self._out_biz_no = None
self._record_id = None
self._target_id = None
@property
def ext_data(self):
return self._ext_data
@ext_data.setter
def ext_data(self, value):
if isinstance(value, OperationExtDataModel):
self._ext_data = value
else:
self._ext_data = OperationExtDataModel.from_alipay_dict(value)
@property
def out_biz_no(self):
return self._out_biz_no
@out_biz_no.setter
def out_biz_no(self, value):
self._out_biz_no = value
@property
def record_id(self):
return self._record_id
@record_id.setter
def record_id(self, value):
self._record_id = value
@property
def target_id(self):
return self._target_id
@target_id.setter
def target_id(self, value):
self._target_id = value
def parse_response_content(self, response_content):
response = super(AlipayCommerceOperationContentApplyResponse, self).parse_response_content(response_content)
if 'ext_data' in response:
self.ext_data = response['ext_data']
if 'out_biz_no' in response:
self.out_biz_no = response['out_biz_no']
if 'record_id' in response:
self.record_id = response['record_id']
if 'target_id' in response:
self.target_id = response['target_id']
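# Hedged sketch (not part of the SDK): basic use of the plain property accessors above.
# Values are hypothetical; in practice the object is populated by parse_response_content()
# from the gateway payload.
if __name__ == "__main__":
    _resp = AlipayCommerceOperationContentApplyResponse()
    _resp.out_biz_no = "20200101000001"   # hypothetical merchant order number
    _resp.record_id = "R123"
    _resp.target_id = "T456"
    # _resp.ext_data accepts an OperationExtDataModel instance or a dict convertible to one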
| 29.9
| 116
| 0.675585
|
90b6811f6f133af432be943358d82c6c70afdd9a
| 2,916
|
py
|
Python
|
Chapter07/Exercise7.03/bookr/reviews/models.py
|
PacktPublishing/Web-Development-with-Django-Second-Edition
|
a9c3d8e46176af612e3b8fe7bc2a2a8effafe981
|
[
"MIT"
] | 2
|
2022-01-03T22:17:21.000Z
|
2022-03-04T13:32:36.000Z
|
Chapter06/Activity6.01/bookr/reviews/models.py
|
PacktPublishing/Web-Development-with-Django-Second-Edition
|
a9c3d8e46176af612e3b8fe7bc2a2a8effafe981
|
[
"MIT"
] | null | null | null |
Chapter06/Activity6.01/bookr/reviews/models.py
|
PacktPublishing/Web-Development-with-Django-Second-Edition
|
a9c3d8e46176af612e3b8fe7bc2a2a8effafe981
|
[
"MIT"
] | 1
|
2022-02-25T13:53:37.000Z
|
2022-02-25T13:53:37.000Z
|
from django.contrib import auth
from django.db import models
class Publisher(models.Model):
"""A company that publishes books."""
name = models.CharField(max_length=50, help_text="The name of the Publisher.")
website = models.URLField(help_text="The Publisher's website.")
email = models.EmailField(help_text="The Publisher's email address.")
def __str__(self):
return self.name
class Book(models.Model):
"""A published book."""
title = models.CharField(max_length=70, help_text="The title of the book.")
publication_date = models.DateField(verbose_name="Date the book was published.")
isbn = models.CharField(max_length=20, verbose_name="ISBN number of the book.")
publisher = models.ForeignKey(Publisher, on_delete=models.CASCADE)
contributors = models.ManyToManyField("Contributor", through="BookContributor")
def __str__(self):
return f"{self.title} ({self.isbn})"
class Contributor(models.Model):
"""A contributor to a Book, e.g. author, editor, co-author."""
first_names = models.CharField(
max_length=50, help_text="The contributor's first name or names."
)
last_names = models.CharField(
max_length=50, help_text="The contributor's last name or names."
)
email = models.EmailField(help_text="The contact email for the contributor.")
def initialled_name(self):
"""self.first_names='Jerome David', self.last_names='Salinger'
=> 'Salinger, JD'"""
initials = "".join([name[0] for name in self.first_names.split(" ")])
return "{}, {}".format(self.last_names, initials)
def __str__(self):
return self.initialled_name()
class BookContributor(models.Model):
class ContributionRole(models.TextChoices):
AUTHOR = "AUTHOR", "Author"
CO_AUTHOR = "CO_AUTHOR", "Co-Author"
EDITOR = "EDITOR", "Editor"
book = models.ForeignKey(Book, on_delete=models.CASCADE)
contributor = models.ForeignKey(Contributor, on_delete=models.CASCADE)
role = models.CharField(
verbose_name="The role this contributor had in the book.",
choices=ContributionRole.choices,
max_length=20,
)
class Review(models.Model):
content = models.TextField(help_text="The Review text.")
rating = models.IntegerField(help_text="The rating the reviewer has given.")
date_created = models.DateTimeField(
auto_now_add=True, help_text="The date and time the review was created."
)
date_edited = models.DateTimeField(
null=True, help_text="The date and time the review was last edited."
)
creator = models.ForeignKey(auth.get_user_model(), on_delete=models.CASCADE)
book = models.ForeignKey(
Book, on_delete=models.CASCADE, help_text="The Book that this review is for."
)
def __str__(self):
return "{} - {}".format(self.creator.username, self.book.title)
| 36
| 85
| 0.687243
|
3b2ac914f848178cf3d590cb40163b9e2aabf074
| 302
|
py
|
Python
|
setup.py
|
alex-sherman/python-mrpc
|
a2b8998fab956b3e6dafcce6d1564691eafcfd23
|
[
"MIT"
] | 3
|
2017-09-28T21:29:31.000Z
|
2021-02-02T05:36:59.000Z
|
setup.py
|
alex-sherman/python-mrpc
|
a2b8998fab956b3e6dafcce6d1564691eafcfd23
|
[
"MIT"
] | 1
|
2017-09-28T21:33:31.000Z
|
2017-10-27T15:08:59.000Z
|
setup.py
|
alex-sherman/python-mrpc
|
a2b8998fab956b3e6dafcce6d1564691eafcfd23
|
[
"MIT"
] | null | null | null |
from distutils.core import setup
setup(
name = "mrpc",
version = "0.1",
description = "A Python RPC framework for mesh networks and multiple protocols",
packages = ["mrpc"],
author='Alex Sherman',
author_email='asherman1024@gmail.com',
url='https://github.com/alex-sherman/python-mrpc')
| 27.454545
| 82
| 0.708609
|
f9f9950826ff78d9eef6dc00167de70fc4f1887d
| 825
|
py
|
Python
|
google/cloud/documentai/v1beta1/documentai-v1beta1-py/google/cloud/documentai_v1beta1/services/document_understanding_service/__init__.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 7
|
2021-02-21T10:39:41.000Z
|
2021-12-07T07:31:28.000Z
|
google/cloud/documentai/v1beta1/documentai-v1beta1-py/google/cloud/documentai_v1beta1/services/document_understanding_service/__init__.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 6
|
2021-02-02T23:46:11.000Z
|
2021-11-15T01:46:02.000Z
|
google/cloud/documentai/v1beta2/documentai-v1beta2-py/google/cloud/documentai_v1beta2/services/document_understanding_service/__init__.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 4
|
2021-01-28T23:25:45.000Z
|
2021-08-30T01:55:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import DocumentUnderstandingServiceClient
from .async_client import DocumentUnderstandingServiceAsyncClient
__all__ = (
'DocumentUnderstandingServiceClient',
'DocumentUnderstandingServiceAsyncClient',
)
| 35.869565
| 74
| 0.774545
|
2d5664dc3bd2c80e794b3f34b0601be208f62033
| 7,672
|
py
|
Python
|
TensorFI/fiConfig.py
|
ozturkosu/TensorFI
|
4551858f2417e604466493de8ef2626afd7c9967
|
[
"MIT"
] | 1
|
2018-10-11T07:27:45.000Z
|
2018-10-11T07:27:45.000Z
|
TensorFI/fiConfig.py
|
ozturkosu/TensorFI
|
4551858f2417e604466493de8ef2626afd7c9967
|
[
"MIT"
] | null | null | null |
TensorFI/fiConfig.py
|
ozturkosu/TensorFI
|
4551858f2417e604466493de8ef2626afd7c9967
|
[
"MIT"
] | null | null | null |
# Fault injection configuration information: this is used for the global fault injector
from enum import Enum
import numpy as np
from faultTypes import *
import yaml
import logging
# These are the list of supported Operations below (if you add a new Op, please add it here)
class Ops(Enum):
NOOP = "NOOP"
ASSIGN = "ASSIGN"
IDENTITY = "IDENTITY"
ADD = "ADD"
SUB = "SUB"
MUL = "MUL"
SQUARE = "SQUARE"
SHAPE = "SHAPE"
SIZE = "SIZE"
FILL = "FILL"
FLOORMOD = "FLOOR-MOD"
RANGE = "RANGE"
RANK = "RANK"
SUM = "SUM"
MATMUL = "MATMUL"
ARGMAX = "ARGMAX"
ARGMIN = "ARGMIN"
EQUAL = "EQUAL"
NOT_EQUAL = "NOT-EQUAL"
LESS_EQUAL = "LESS-EQUAL"
CAST = "CAST"
MEAN = "MEAN"
COUNT_NONZERO = "COUNT-NONZERO"
RESHAPE = "RESHAPE"
CONV2D = "CONV2D"
RELU = "RELU"
MAXPOOL = "MAX-POOL"
STRIDEDSLICE = "STRIDED-SLICE"
SOFTMAX = "SOFT-MAX"
MAXIMUM = "MAXIMUM"
MINIMUM = "MINIMUM"
EXPANDDIMS = "EXPAND-DIMS"
SWITCH = "SWITCH"
GREATER = "GREATER"
NEGATIVE = "NEGATIVE"
POWER = "POW"
REALDIV = "REALDIV"
ABSOLUTE = "ABSOLUTE"
RSQRT = "RSQRT"
LOG = "LOG"
BIASADD = "BIASADD"
SIGMOID = "SIGMOID"
TANH = "TANH"
PACK = "PACK"
UNPACK = "UNPACK"
ALL = "ALL" # Chooses all the operations for injection (end of list)
END = "END" # Dummy operation for end of list
# End of Ops
# These are the list of supported Fault types below (if you add a new type, please add it here)
class FaultTypes(Enum):
NONE = "None"
RAND = "Rand"
ZERO = "Zero"
# End of FaultTypes
# These are the list of supported Fields below (if you add a new Field, please add it here)
class Fields(Enum):
ScalarFaultType = "ScalarFaultType"
TensorFaultType = "TensorFaultType"
Ops = "Ops"
Seed = "Seed"
SkipCount = "SkipCount"
# End of Fields
# These are the fault configuration functions
# The global class fiConf holds the config functions
class FIConfig(object):
"Class to store configuration information about faults"
# Static variable: Mapping from fault types to fault injection functions
faultTypeMap = {
FaultTypes.NONE.value : (noScalar, noTensor),
FaultTypes.RAND.value : (randomScalar, randomTensor),
FaultTypes.ZERO.value : (zeroScalar, zeroTensor)
}
def faultConfigType(self, faultTypeScalar, faultTypeTensor):
"Configure the fault injection type for Scalars and Tensors"
# Check if the fault type is known and if so, assign the scalar functions
if self.faultTypeMap.has_key(faultTypeScalar):
self.faultTypeScalar = faultTypeScalar
self.injectScalar = self.faultTypeMap[ faultTypeScalar ][0]
else:
# If it's not known, declare an error
raise ValueError("Unknown fault type " + str(faultTypeScalar))
# Check if the fault type is known and if so, assign the tensor functions
if self.faultTypeMap.has_key(faultTypeTensor):
self.faultTypeTensor = faultTypeTensor
self.injectTensor = self.faultTypeMap[ faultTypeTensor ][1]
else:
# If it's not known, declare an error
raise ValueError("Unknown fault type " + str(faultTypeTensor))
def faultConfigOp(self, opType, prob = 1.0):
"Configure the fault injection operations"
# Check if it's a defined operation, and if so, add it to the injectMap
for op in Ops:
if op.value==opType:
# Convert the prob to a 32 bit floating point before adding it
probFP = np.float32(prob)
# Check if the probability is a sane value
if (probFP > 1.0 or probFP < 0.0):
raise ValueError("Probability has to be in range [0,1]")
# Finally, add the operation to the injectMap
self.injectMap[ op ] = probFP
def isSelected(self, op):
"Check if the op is among those selected for injection"
# Either all operations are selected or this particular one is selected
# FIXME: Add specific operation categories here in the future
return self.injectMap.has_key(op) or self.injectMap.has_key(Ops.ALL)
def getProbability(self, op):
"Retreive the probability of the op for injection if it's present, otherwise return ALL"
# Precondition: injectMap.has_key(op) or injectMap.has_key(Ops.ALL)
if self.injectMap.has_key(op):
return self.injectMap[ op ]
else:
return self.injectMap[ Ops.ALL ]
def __str__(self):
"Convert this object to a string representation for printing"
res = [ "FIConfig: {" ]
res.append("\tfaultTypeScalar : " + str(self.faultTypeScalar) )
res.append("\tfaultTypeTensor : " + str(self.faultTypeTensor) )
res.append("\tinjectMap : " + str(self.injectMap) )
res.append("\tfaultSeed : " + str(self.faultSeed) )
res.append("\tskipCount : " + str(self.skipCount) )
res.append(" }")
return "\n".join(res)
def __init__(self,fiParams):
"Configure the initial fault injection parameters from the fiParams Dictionary"
# First configure the Scalar fault type
# Default value of fault is NoFault
if fiParams.has_key(Fields.ScalarFaultType.value):
faultTypeScalar = fiParams[Fields.ScalarFaultType.value]
else:
faultTypeScalar = "None"
# Next configure the Tensor fault type
# Default value of fault is NoFault
if fiParams.has_key(Fields.TensorFaultType.value):
faultTypeTensor = fiParams[Fields.TensorFaultType.value]
else:
faultTypeTensor = "None"
# Finally, call the faultConfigtype function with the parameters
self.faultConfigType(faultTypeScalar, faultTypeTensor)
# Configure the operations to be included for instrumenting
# default value is inject nothing (empty op list)
self.injectMap = { }
if fiParams.has_key(Fields.Ops.value):
opsList = fiParams[Fields.Ops.value]
if not opsList==None:
for element in opsList:
(opType, prob) = element.split('=')
self.faultConfigOp(opType.rstrip(), prob.lstrip())
# Configure the seed value if one is specified
# default value is none (so it's non-deterministic)
if fiParams.has_key(Fields.Seed.value):
self.faultSeed = np.int32(fiParams[Fields.Seed.value])
else:
self.faultSeed = None
# Configure the skip count if one is specified
# default value is 0
if fiParams.has_key(Fields.SkipCount.value):
self.skipCount = np.int32(fiParams[Fields.SkipCount.value])
else:
self.skipCount = 0
# End of constructor
# End of class FIConfig
# These are called from within modifyGraph to read the fault params in a file
def staticFaultParams():
"Statically hardcoded parameter values for testing"
params = { }
# Configure the fault types for Scalars and Tensors
params[Fields.ScalarFaultType.value] = FaultTypes.RAND.value # Scalar Fault type
params[Fields.TensorFaultType.value] = FaultTypes.RAND.value # Tensor Fault type
# List of Operations to fault inject and their probabilities
params[Fields.Ops.value] = [ "ADD = 0.5", "MUL = 1.0" ]
# Random seed for the fault injector
params[Fields.Seed.value] = 100000 # Random seed value
# How many operator counts to skip (typically for training)
params[Fields.SkipCount.value] = 1 # SkipCount value
# Make sure the parameter dict is returned back to the caller
return params
def yamlFaultParams(pStream):
"Read fault params from YAML file"
# NOTE: We assume pStream is a valid YAML stream
params = yaml.load(pStream)
return params
def configFaultParams(paramFile = None):
"Return the fault params from different files"
if paramFile == None:
return staticFaultParams()
params = {}
try:
paramStream = open(paramFile, "r")
except IOError:
print "Unable to open file ", paramFile
return params
# Check if the file extension is .yaml, and if so parse the Stream
# (right now, this is the only supported format, but can be extended)
if paramFile.endswith(".yaml"):
params = yamlFaultParams(paramStream)
else:
print "Unknown file format: ", paramFile
#print params
return params
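# Hedged example (not part of TensorFI): a YAML configuration that yamlFaultParams()
# above would load into the expected params dictionary. Key names follow the Fields
# enum, fault-type names follow FaultTypes, and "OP = prob" entries are parsed by
# faultConfigOp(). Note the module itself targets Python 2 (dict.has_key, print
# statements), so FIConfig(params) must be constructed under Python 2.
EXAMPLE_YAML_CONFIG = """
ScalarFaultType: Rand
TensorFaultType: Zero
Ops:
  - ADD = 0.5
  - MUL = 1.0
Seed: 100000
SkipCount: 1
"""
# params = yaml.load(EXAMPLE_YAML_CONFIG)   # then: fiConf = FIConfig(params)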
| 31.834025
| 95
| 0.722888
|
e20e7c2e5f411957022dba70b10159cf28561dbf
| 972
|
py
|
Python
|
scrapy/commands/edit.py
|
h4ck3rm1k3/scrapy
|
59dcdbe84769c9d204f552a2b545b1e096a2d42c
|
[
"BSD-3-Clause"
] | 26
|
2015-02-07T17:35:26.000Z
|
2020-04-27T21:11:00.000Z
|
scrapy/commands/edit.py
|
h4ck3rm1k3/scrapy
|
59dcdbe84769c9d204f552a2b545b1e096a2d42c
|
[
"BSD-3-Clause"
] | 2
|
2021-09-20T19:54:29.000Z
|
2022-03-22T21:47:39.000Z
|
scrapy/commands/edit.py
|
h4ck3rm1k3/scrapy
|
59dcdbe84769c9d204f552a2b545b1e096a2d42c
|
[
"BSD-3-Clause"
] | 9
|
2015-09-21T08:17:20.000Z
|
2021-02-07T02:31:36.000Z
|
import sys, os
from scrapy.command import ScrapyCommand
from scrapy.exceptions import UsageError
class Command(ScrapyCommand):
requires_project = True
default_settings = {'LOG_ENABLED': False}
def syntax(self):
return "<spider>"
def short_desc(self):
return "Edit spider"
def long_desc(self):
return "Edit a spider using the editor defined in EDITOR setting"
def _err(self, msg):
sys.stderr.write(msg + os.linesep)
self.exitcode = 1
def run(self, args, opts):
if len(args) != 1:
raise UsageError()
editor = self.settings['EDITOR']
try:
spidercls = self.crawler_process.spiders.load(args[0])
except KeyError:
return self._err("Spider not found: %s" % args[0])
sfile = sys.modules[spidercls.__module__].__file__
sfile = sfile.replace('.pyc', '.py')
self.exitcode = os.system('%s "%s"' % (editor, sfile))
| 26.27027
| 73
| 0.616255
|
39d2d1e333821cb63f888fc9e30da35f74a06ee0
| 21,221
|
py
|
Python
|
bin/ensembl_prep.py
|
RahmanTeamDevelopment/CAVA
|
cbaf558d8a0614e3eb505beb50fabfb60f56b76a
|
[
"MIT"
] | 2
|
2017-02-16T12:29:29.000Z
|
2018-04-23T09:11:51.000Z
|
bin/ensembl_prep.py
|
RahmanTeamDevelopment/CAVA
|
cbaf558d8a0614e3eb505beb50fabfb60f56b76a
|
[
"MIT"
] | 1
|
2017-03-01T11:11:51.000Z
|
2017-03-01T11:11:51.000Z
|
bin/ensembl_prep.py
|
RahmanTeamDevelopment/CAVA
|
cbaf558d8a0614e3eb505beb50fabfb60f56b76a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Transcript database preparation tool (ensembl_prep)
#######################################################################################################################
# Basic imports
from __future__ import division
import os
import sys
import gzip
import datetime
from optparse import OptionParser
from operator import itemgetter
import pysam
import urllib
#######################################################################################################################
# Class representing a transcript
class Transcript(object):
# Constructor
def __init__(self):
self.ENST = None
self.GENE = None
self.ENSG = None
self.CHROM = None
self.STRAND = None
self.POS = None
self.POSEND = None
self.GENETYPE = None
self.TRANSTYPE = None
self.CODING_START = None
self.CODING_END = None
self.CODING_START_RELATIVE = None
self.CCDS = None
self.EXONS = []
self.PROTL = None
self.CDNAL = None
self.isComplete = None
# Get summary information about the transcript
def getInfoString(self):
if self.STRAND == '1': ret = '+/'
else: ret = '-/'
cdna = self.getcDNALength()
return ret+str(round((self.POSEND-self.POS+1)/1000,1))+'kb/'+str(len(self.EXONS))+'/'+str(round(cdna/1000,1))+'kb/'+str(self.getProteinLength())
# Get cDNA length of the transcript
def getcDNALength(self):
ret = 0
for exon in self.EXONS: ret += exon.END - exon.START
return ret
# Get protein length of the transcript
def getProteinLength(self):
codingdna = 0
if self.STRAND == '1':
for exon in self.EXONS:
if exon.END < self.CODING_START: continue
if exon.START > self.CODING_END: continue
if exon.START <= self.CODING_START <= exon.END: start = self.CODING_START
else: start = exon.START + 1
if exon.START <= self.CODING_END <= exon.END: end = self.CODING_END
else: end = exon.END
codingdna += end - start + 1
else:
for exon in self.EXONS:
if exon.START > self.CODING_START: continue
if exon.END < self.CODING_END: continue
if exon.START <= self.CODING_START <= exon.END: end = self.CODING_START
else: end = exon.END
if exon.START <= self.CODING_END <= exon.END: start = self.CODING_END
else: start = exon.START + 1
codingdna += end - start + 1
return int((codingdna - 3) / 3)
# Check if it is a candidate transcript
def isCandidate(self):
if not (self.GENETYPE=='protein_coding' and self.TRANSTYPE=='protein_coding'): return False
return (self.CODING_START is not None and self.CODING_END is not None) and self.isComplete
# Output transcript
def output(self, outfile, outfile_list):
out = self.ENST + '\t' + self.GENE + '\t' + self.ENSG + '\t' + self.getInfoString() + '\t' + self.CHROM + '\t' + self.STRAND + '\t' + str(self.POS)
out += '\t' + str(self.POSEND) + '\t' + str(self.CODING_START_RELATIVE) + '\t' + str(self.CODING_START)
out += '\t' + str(self.CODING_END)
for exondata in self.EXONS: out += '\t' + str(exondata.START) + '\t' + str(exondata.END)
outfile.write(out + '\n')
outfile_list.write(self.ENSG+'\t'+self.GENE+'\t'+self.ENST+'\n')
# Finalize transcript
def finalize(self):
if self.STRAND == '1':
self.POS = self.EXONS[0].START
self.POSEND = self.EXONS[len(self.EXONS) - 1].END
codingStartRelative = 0
for exondata in self.EXONS:
if exondata.START <= self.CODING_START <= exondata.END:
codingStartRelative += self.CODING_START - exondata.START
break
else:
codingStartRelative += exondata.END - exondata.START
self.CODING_START_RELATIVE = codingStartRelative
else:
self.POS = self.EXONS[len(self.EXONS) - 1].START
self.POSEND = self.EXONS[0].END
codingStartRelative = 0
for exondata in self.EXONS:
if exondata.START <= self.CODING_START <= exondata.END:
codingStartRelative += exondata.END - self.CODING_START + 1
break
else:
codingStartRelative += exondata.END - exondata.START
self.CODING_START_RELATIVE = codingStartRelative
self.PROTL = self.getProteinLength()
self.CDNAL = self.getcDNALength()
# Class representing an exon
class Exon(object):
# Constructor
def __init__(self, start, end):
self.START = start
self.END = end
# Class representing a gene
class Gene(object):
# Constructor
def __init__(self, symbol, ensg):
self.SYMBOL = symbol
self.ENSG = ensg
self.TRANSCRIPTS = dict()
# Select ICR transcript
def selectTranscript(self):
ccds_set = []
nonccds_set = []
for enst,transcript in self.TRANSCRIPTS.iteritems():
if transcript.CCDS: ccds_set.append(transcript)
else: nonccds_set.append(transcript)
if len(ccds_set) > 0: candidates = ccds_set
else: candidates = nonccds_set
selected = Transcript()
selected.PROTL = selected.CDNAL = -1
for t in candidates:
if t.PROTL > selected.PROTL: selected = t
elif t.PROTL == selected.PROTL and t.CDNAL > selected.CDNAL: selected = t
return selected
# Output all or selected transcripts
def output(self, outfile, outfile_list, select, mcg_transcripts):
if select:
if self.SYMBOL in mcg_transcripts.keys():
ok = False
for _,transcript in self.TRANSCRIPTS.iteritems():
if transcript.ENST in mcg_transcripts[self.SYMBOL]:
transcript.output(outfile,outfile_list)
ok = True
if self.SYMBOL not in mcg_transcripts.keys() or not ok:
transcript = self.selectTranscript()
transcript.output(outfile,outfile_list)
else:
for _,transcript in self.TRANSCRIPTS.iteritems():
transcript.output(outfile,outfile_list)
#######################################################################################################################
# Retrieve tag value
def getValue(tags, tag):
ret=None
for x in tags:
x = x.strip()
if x.startswith(tag):
s = x[x.find('\"') + 1:]
ret = s[:s.find('\"')]
break
return ret
# Retrieve boolean tag value
def getBooleanValue(tags, tag):
for x in tags:
x = x.strip()
if x.startswith('tag'):
s = x[x.find('\"') + 1:]
value = s[:s.find('\"')]
if value==tag: return True
return False
# Read transcript IDs from file
def readTranscriptIDs(inputfn):
ret = set()
for line in open(inputfn): ret.add(line.strip())
return ret
# Sort records in file
def sortRecords(records, idx1, idx2):
ret = []
chroms = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', 'MT', 'X', 'Y']
for i in range(len(chroms)):
chrom = chroms[i]
if chrom in records.keys():
records[chrom] = sorted(records[chrom], key=itemgetter(idx1,idx2))
for i in range(len(chroms)):
chrom = chroms[i]
if chrom in records.keys():
for record in records[chrom]: ret.append(record)
return ret
# Write records to file
def writeToFile(sortedRecords, filename):
outfile = open(filename, 'w')
for record in sortedRecords:
s = str(record[0]).rstrip()
for i in range(1, len(record)): s += '\t' + str(record[i]).rstrip()
outfile.write(s + '\n')
outfile.close()
# Read records from file as a list
def readRecords(inputfn):
ret = []
for line in open(inputfn): ret.append(line.strip())
return ret
# Process Ensembl data
def run(ver, options, genome_build):
# Read manually selected MCG transcripts from file
dir = os.path.dirname(os.path.realpath(sys.argv[0]))
mcg_transcripts=dict()
for line in open(dir+'/ensembl_prep/MCG_transcripts.txt'):
line = line.strip()
if line == '': continue
cols = line.split('\t')
if cols[0] not in mcg_transcripts.keys(): mcg_transcripts[cols[0]] = set()
mcg_transcripts[cols[0]].add(cols[1])
# Changing transcript for certain releases
if int(options.ensembl)>=71: mcg_transcripts['BMPR1A'] = {'ENST00000372037'}
if int(options.ensembl)>=69: mcg_transcripts['PRKAR1A'] = {'ENST00000392711'}
if int(options.ensembl)>=76:
mcg_transcripts['MEN1'] = {'ENST00000394374'}
mcg_transcripts['RECQL4'] = {'ENST00000617875'}
# Dictionary of Gene objects
genesdata = dict()
# Load custom transcript IDs
transIDs = set()
if options.input is not None:
transIDs = readTranscriptIDs(options.input)
print '\nOnly ' + str(len(transIDs)) + ' transcripts read from ' + options.input + ' are considered\n'
else: print '\nAll transcripts from the Ensembl release are considered\n'
# Print out info
if options.select: print 'Transcript selection switched on\n'
# Load candidate and CCDS data for Ensembl <75
candidates = dict()
if int(options.ensembl) < 75:
for line in open(dir+'/ensembl_prep/info'+options.ensembl+'.txt'):
line=line.strip()
if line=='': continue
cols = line.split('\t')
if cols[0] not in candidates.keys(): candidates[cols[0]]=dict()
candidates[cols[0]][cols[1]]=int(cols[2])
# Download Ensembl data
sys.stdout.write('Downloading Ensembl database... ')
sys.stdout.flush()
url = 'ftp://ftp.ensembl.org/pub/release-' + options.ensembl + '/gtf/homo_sapiens/Homo_sapiens.' + genome_build + '.' + options.ensembl + '.gtf.gz'
try:
urllib.urlretrieve(url, 'ensembl_data.gz')
except:
print '\n\nCannot connect to Ensembl FTP site. No internet connection?\n'
quit()
sys.stdout.write('OK\n')
# Iterate through the lines in the ensembl data file
sys.stdout.write('Extracting transcript data... ')
sys.stdout.flush()
first = True
prevenst = ''
transcript = None
for line in gzip.open('ensembl_data.gz', 'r'):
line = line.strip()
if line.startswith('#'): continue
cols = line.split('\t')
# Only consider transcripts on the following chromosomes
if cols[0] not in ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', 'MT', 'X', 'Y']: continue
# Consider only certain types of lines
if cols[2] not in ['exon','transcript','start_codon','stop_codon']: continue
# Annotation tags
tags = cols[8].split(';')
# Retrieve transcript ID
enst = getValue(tags, 'transcript_id')
# Do not consider transcript if it is not on the custom transcript list
if options.input is not None and enst not in transIDs: continue
# Finalize and output transcript object
if not enst == prevenst:
# Finalize transcript and add to Gene object if candidate
if not first:
transcript.finalize()
if transcript.isCandidate():
if transcript.ENSG not in genesdata.keys(): genesdata[transcript.ENSG] = Gene(transcript.GENE, transcript.ENSG)
genesdata[transcript.ENSG].TRANSCRIPTS[transcript.ENST] = transcript
# Initialize new Transcript object
transcript = Transcript()
transcript.ENST = enst
transcript.GENE = getValue(tags, 'gene_name')
transcript.ENSG = getValue(tags, 'gene_id')
transcript.CHROM = cols[0]
if cols[6] == '+': transcript.STRAND = '1'
else: transcript.STRAND = '-1'
# Retrieve gene biotype and transcript biotype
transcript.GENETYPE = getValue(tags, 'gene_type')
if transcript.GENETYPE is None: transcript.GENETYPE = getValue(tags, 'gene_biotype')
transcript.TRANSTYPE = getValue(tags, 'transcript_type')
if transcript.TRANSTYPE is None: transcript.TRANSTYPE = getValue(tags, 'transcript_biotype')
if transcript.TRANSTYPE is None: transcript.TRANSTYPE = cols[1]
# If line represents an exon
if cols[2] == 'exon':
idx = 0
for x in tags:
x = x.strip()
if x.startswith('exon_number'):
s = x[x.find('\"') + 1:]
idx = int(s[:s.find('\"')]) - 1
break
start = int(cols[3]) - 1
end = int(cols[4])
if idx >= len(transcript.EXONS):
for _ in range(len(transcript.EXONS), idx + 1): transcript.EXONS.append(None)
transcript.EXONS[idx] = Exon(start, end)
if cols[2] == 'start_codon':
if transcript.STRAND == '1':
if transcript.CODING_START is None or int(cols[3]) < transcript.CODING_START: transcript.CODING_START = int(cols[3])
else:
if transcript.CODING_START is None or int(cols[4]) > transcript.CODING_START: transcript.CODING_START = int(cols[4])
if cols[2] == 'stop_codon':
if transcript.STRAND == '1':
if transcript.CODING_END is None or int(cols[4]) > transcript.CODING_END: transcript.CODING_END = int(cols[4])
else:
if transcript.CODING_END is None or int(cols[3]) < transcript.CODING_END: transcript.CODING_END = int(cols[3])
# Check if transcript is complete and is a CCDS transcript
if transcript.isComplete is None:
if int(options.ensembl) < 75:
if transcript.ENST in candidates[transcript.CHROM].keys():
transcript.CCDS = (candidates[transcript.CHROM][transcript.ENST] == 1)
transcript.isComplete = True
else:
transcript.isComplete = False
else:
transcript.isComplete = not (getBooleanValue(tags, 'cds_start_NF') or getBooleanValue(tags, 'cds_end_NF'))
if getValue(tags, 'ccds_id') is not None: transcript.CCDS=True
else: transcript.CCDS=False
prevenst = enst
if first: first = False
# Finalize last transcript and add to Gene object if candidate
if transcript is not None:
transcript.finalize()
if transcript.isCandidate():
if transcript.ENSG not in genesdata.keys(): genesdata[transcript.ENSG] = Gene(transcript.GENE, transcript.ENSG)
genesdata[transcript.ENSG].TRANSCRIPTS[transcript.ENST] = transcript
# If no transcript ID from the input file was found in the Ensembl release
if len(genesdata) == 0:
print '\n\nNo transcripts from '+options.input+' found in Ensembl release.'
print '\nNo transcript database created.'
print "-----------------------------------------------------------------\n"
os.remove('ensembl_data.gz')
quit()
# Initialize temporary output file
outfile = open('temp.txt', 'w')
# Initialize output list file if needed
outfile_list = open(options.output+'.txt','w')
outfile_list.write('# Created by ensembl_prep '+ver+' based on Ensembl release '+options.ensembl+' (genome build '+genome_build+')\n')
outfile_list.write('ENSG\tGENE\tENST\n')
# Output transcripts of each gene
for ensg, gene in genesdata.iteritems(): gene.output(outfile,outfile_list,options.select,mcg_transcripts)
# Close temporary output files
outfile.close()
outfile_list.close()
# Sort temporary output file
data = dict()
counter = 0
for line in open('temp.txt'):
if not line.startswith('ENST'): continue
counter += 1
line.rstrip()
record = line.split('\t')
record[6] = int(record[6])
if record[4] in data.keys():
data[record[4]].append(record)
else:
data[record[4]] = []
data[record[4]].append(record)
sys.stdout.write('OK\n')
sys.stdout.write('Sorting transcripts... ')
sys.stdout.flush()
sortedRecords = sortRecords(data, 6, 7)
writeToFile(sortedRecords, options.output)
# Remove temporary files
sys.stdout.write('OK\n')
sys.stdout.write('Removing temporary files... ')
sys.stdout.flush()
os.remove('temp.txt')
os.remove('ensembl_data.gz')
sys.stdout.write('OK\n')
# Return sorted records
return len(sortedRecords)
# Use Tabix to index output file
def indexFile(options):
sys.stdout.write('Compressing output file... ')
sys.stdout.flush()
pysam.tabix_compress(options.output, options.output + '.gz', force=True)
sys.stdout.write('OK\n')
sys.stdout.write('Indexing output file... ')
sys.stdout.flush()
pysam.tabix_index(options.output + '.gz', seq_col=4, start_col=6, end_col=7, meta_char='#', force=True)
sys.stdout.write('OK\n')
# Check if string is a number (integer)
def is_number(s):
try:
int(s)
return True
except ValueError:
return False
#######################################################################################################################
if __name__ == '__main__':
# Version number
ver = 'v1.2.0'
# Command line argument parsing
descr = 'ensembl_prep '+ver+' is a simple tool for generating the local Ensembl transcript database file used by CAVA (via the @ensembl option flag).'
epilog = '\nExample usage: ./ensembl_prep.py -i input.txt -e 70 -o out -s\n\n'
OptionParser.format_epilog = lambda self, formatter: self.epilog
parser = OptionParser(usage='python path/to/cava/ensembl_prep.py <options>', version=ver, description=descr,epilog=epilog)
parser.add_option('-i', "--in", default=None, dest='input', action='store',help="Input filename (list of ENST IDs)")
parser.add_option('-o', "--out", default=None, dest='output', action='store', help="Output filename prefix")
parser.add_option('-e', "--ens", default=None, dest='ensembl', action='store', help="Ensembl release version")
#parser.add_option('-g', "--genome", dest='genome', action='store', default='GRCh37',help="Human genome reference version (default: %default)")
parser.add_option('-s', "--select", default=False, dest='select', action='store_true',help="Select transcript for each gene [default: %default]")
(options, args) = parser.parse_args()
# Checking if all required options specified
if options.ensembl is None:
print '\nError: no Ensembl release specified. Use option -h to get help!\n'
quit()
if not is_number(options.ensembl):
print '\nError: Ensembl release specified is not an integer. Use option -h to get help!\n'
quit()
if options.output is None:
print '\nError: no output file name specified. Use option -h to get help!\n'
quit()
# Must use Ensembl release >= 70
if not (int(options.ensembl) >= 70 or int(options.ensembl) == 65) :
print '\nError: This version works with Ensembl v65 or >= v70.\n'
quit()
# Genome build
# genome_build = options.genome
genome_build = 'GRCh37' if int(options.ensembl) <= 75 else 'GRCh38'
# Printing out version information
print "\n---------------------------------------------------------------------------------------"
print 'CAVA ' + ver + ' transcript database preparation tool (ensembl_prep) is now running'
print 'Started: ', datetime.datetime.now(), '\n'
# Print info
print 'Ensembl version: ' + options.ensembl
print 'Reference genome: ' + genome_build
# Creating compressed output file
Nretrieved = run(ver, options, genome_build)
print '\nA total of ' + str(Nretrieved) + ' transcripts have been retrieved\n'
# Indexing output file with Tabix
indexFile(options)
# Removing uncompressed output file
os.remove(options.output)
# Printing out summary information
print ''
print '---------------------'
print 'Output files created:'
print '---------------------'
print options.output + '.gz (transcript database)'
print options.output + '.gz.tbi (index file)'
print options.output + '.txt (list of transcripts)'
print ''
print 'CAVA ensembl_prep successfully finished: ', datetime.datetime.now()
print "---------------------------------------------------------------------------------------\n"
#######################################################################################################################
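# Hedged illustration (not part of the tool): how getValue() and getBooleanValue()
# above extract fields from the GTF attribute column. The attribute string is a
# made-up example in Ensembl GTF style; expected results are shown as comments
# because the module as a whole targets Python 2.
_example_tags = ('gene_id "ENSG00000139618"; transcript_id "ENST00000380152"; '
                 'gene_name "BRCA2"; tag "CCDS"; tag "basic"').split(';')
# getValue(_example_tags, 'transcript_id')   -> 'ENST00000380152'
# getValue(_example_tags, 'gene_name')       -> 'BRCA2'
# getBooleanValue(_example_tags, 'basic')    -> True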
| 38.374322
| 181
| 0.581122
|
961af5c7ca95bf4b1a45d10a163bfba50883e7b0
| 2,841
|
py
|
Python
|
deepdata/mirror.py
|
killf/deepdata
|
4aea89265be6ad6b9a8aa0d5ad1487ac687a62c9
|
[
"Apache-2.0"
] | null | null | null |
deepdata/mirror.py
|
killf/deepdata
|
4aea89265be6ad6b9a8aa0d5ad1487ac687a62c9
|
[
"Apache-2.0"
] | null | null | null |
deepdata/mirror.py
|
killf/deepdata
|
4aea89265be6ad6b9a8aa0d5ad1487ac687a62c9
|
[
"Apache-2.0"
] | null | null | null |
import os
from os.path import join
import deepdata
__all__ = []
BASE_PATH = os.environ.get('DEEPDATA_MIRROR', 'http://pytorch_mirror.killf.info')
DATA_PATH = join(BASE_PATH, 'torchvision', 'mnist')
deepdata.mnist.MNIST.resources = [
(join(DATA_PATH, 'train-images-idx3-ubyte.gz'), "f68b3c2dcbeaaa9fbdd348bbdeb94873"),
(join(DATA_PATH, 'train-labels-idx1-ubyte.gz'), "d53e105ee54ea40749a09fcbcd1e9432"),
(join(DATA_PATH, 't10k-images-idx3-ubyte.gz'), "9fb629c4189551a2d022fa330f9573f3"),
(join(DATA_PATH, 't10k-labels-idx1-ubyte.gz'), "ec29112dd5afa0611ce80d1b7f02629c")
]
DATA_PATH = join(BASE_PATH, 'torchvision', 'fashion-mnist')
deepdata.mnist.FashionMNIST.resources = [
(join(DATA_PATH, 'train-images-idx3-ubyte.gz'), '8d4fb7e6c68d591d4c3dfef9ec88bf0d'),
(join(DATA_PATH, 'train-labels-idx1-ubyte.gz'), '25c81989df183df01b3e8a0aad5dffbe'),
(join(DATA_PATH, 't10k-images-idx3-ubyte.gz'), 'bef4ecab320f06d8554ea6380940ec79'),
(join(DATA_PATH, 't10k-labels-idx1-ubyte.gz'), 'bb300cfdad3c16e7a12a480ee83cd310')
]
DATA_PATH = join(BASE_PATH, 'torchvision', 'kmnist')
deepdata.mnist.KMNIST.resources = [
(join(DATA_PATH, 'train-images-idx3-ubyte.gz'), 'bdb82020997e1d708af4cf47b453dcf7'),
(join(DATA_PATH, 'train-labels-idx1-ubyte.gz'), 'e144d726b3acfaa3e44228e80efcd344'),
(join(DATA_PATH, 't10k-images-idx3-ubyte.gz'), '5c965bf0a639b31b8f53240b1b52f4d7'),
(join(DATA_PATH, 't10k-labels-idx1-ubyte.gz'), '7320c461ea6c1c855c0b718fb2a4b134')
]
DATA_PATH = join(BASE_PATH, 'torchvision', 'emnist')
deepdata.mnist.EMNIST.resources = [
(join(DATA_PATH, 'train-images-idx3-ubyte.gz'), 'f68b3c2dcbeaaa9fbdd348bbdeb94873'),
(join(DATA_PATH, 'train-labels-idx1-ubyte.gz'), 'd53e105ee54ea40749a09fcbcd1e9432'),
(join(DATA_PATH, 't10k-images-idx3-ubyte.gz'), '9fb629c4189551a2d022fa330f9573f3'),
(join(DATA_PATH, 't10k-labels-idx1-ubyte.gz'), 'ec29112dd5afa0611ce80d1b7f02629c')
]
DATA_PATH = join(BASE_PATH, 'torchvision', 'qmnist')
deepdata.mnist.QMNIST.resources = {
'train': [(join(DATA_PATH, 'qmnist-train-images-idx3-ubyte.gz'), 'ed72d4157d28c017586c42bc6afe6370'),
(join(DATA_PATH, 'qmnist-train-labels-idx2-int.gz'), '0058f8dd561b90ffdd0f734c6a30e5e4')],
'test': [(join(DATA_PATH, 'qmnist-test-images-idx3-ubyte.gz'), '1394631089c404de565df7b7aeaf9412'),
(join(DATA_PATH, 'qmnist-test-labels-idx2-int.gz'), '5b5b05890a5e13444e108efe57b788aa')],
'nist': [(join(DATA_PATH, 'xnist-images-idx3-ubyte.xz'), '7f124b3b8ab81486c9d8c2749c17f834'),
(join(DATA_PATH, 'xnist-labels-idx2-int.xz'), '5ed0e788978e45d4a8bd4b7caec3d79d')]
}
DATA_PATH = join(BASE_PATH, 'torchvision', 'cifar')
deepdata.cifar.CIFAR10.url = join(DATA_PATH, 'cifar-10-python.tar.gz')
deepdata.cifar.CIFAR100.url = join(DATA_PATH, 'cifar-100-python.tar.gz')
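# Hedged usage note (not part of the module): importing this module monkey-patches
# the download locations on the deepdata dataset classes, so it only needs to be
# imported once before any dataset is constructed, e.g.
#
#   import deepdata.mirror                                 # apply mirror URLs
#   ds = deepdata.mnist.MNIST("./data", download=True)     # constructor signature assumed
#
# Setting the DEEPDATA_MIRROR environment variable before import points BASE_PATH
# at a different mirror host.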
| 51.654545
| 105
| 0.739176
|
8cbad2a49c7e207819ddbbf52aeb71b65b886297
| 9,753
|
py
|
Python
|
make_fandom_data.py
|
Nellius/FanFiction-FandomData
|
da0f0d18c0dac1f35ba736d00f96b3e17b2dfff9
|
[
"MIT"
] | 1
|
2019-09-19T01:24:05.000Z
|
2019-09-19T01:24:05.000Z
|
make_fandom_data.py
|
Nellius/FanFiction-FandomData
|
da0f0d18c0dac1f35ba736d00f96b3e17b2dfff9
|
[
"MIT"
] | null | null | null |
make_fandom_data.py
|
Nellius/FanFiction-FandomData
|
da0f0d18c0dac1f35ba736d00f96b3e17b2dfff9
|
[
"MIT"
] | null | null | null |
"""Scrape fandom data from Fanfiction.net and write JSON files."""
# -*- coding: utf-8 -*-
import sys
import time
import re
import os
import json
from datetime import datetime, timezone
from functools import reduce
from bs4 import BeautifulSoup
import undetected_chromedriver as uc
class SectionData:
"""Scrape self.url and store section fandom data.
Attributes:
url (str): section url
id (str): section id
name (str): section name
crossover (str): 'not_crossover' or 'crossover'
fandoms (list): list of fandom data {'name': str, 'url': str, 'rough_story_number': int}
"""
def __init__(self, url: str):
"""Initialize.
Args:
url (str): Fanfiction.net section page
ex: https://www.fanfiction.net/book/
"""
url_array = url.split('/')
self.url = url
self.id = url_array[-2]
self.name = ''
self.crossover = 'crossover' if url_array[-3] == "crossovers" else 'not_crossover'
self.fandoms = []
def scrape(self, html: str):
"""Scrape self.url and set self.name and self.fandoms."""
soup = BeautifulSoup(html, "lxml")
regex = r"( Crossover)? \| FanFiction$"
self.name = re.sub(regex, '', soup.find('title').string)
div_tags = soup.find('div', id='list_output').find_all('div')
for div_tag in div_tags:
a_tag = div_tag.find('a')
url = 'https://www.fanfiction.net' + a_tag.get('href')
# * a_tag.get('title') might contain "\'" or '\""'
# * "lxml" can't parse a_tag.get('title') with '\""' correctly
# * a_tag with long text is abbreviated by '...'
# If a_tag.get('title') contains '\""',
if re.search(r"\\$", a_tag.get('title')):
# If a_tag.text is abbreviated by '...'
if re.search(r"\.\.\.$", a_tag.text):
# Get fandom name by scraping url
name = self.get_fandom_name(url)
else:
# Use a_tag.text
name = a_tag.text
else:
# Replace "\'" with "'" and use a_tag.get('title')
name = re.sub(r"\\'", "'", a_tag.get('title'))
str_numbers = div_tag.find('span').text[1:-1] \
.replace(',', '').replace('K', ' 1000').replace('M', ' 1000000').split(' ')
numbers = [float(i) for i in str_numbers]
rough_story_number = int(reduce((lambda x, y: x * y), numbers))
fandom = {
'name': name,
'url': url,
'rough_story_number': rough_story_number
}
self.fandoms.append(fandom)
@staticmethod
def get_fandom_name(url: str) -> str:
"""Scrape browse page url and return fandom name.
Args:
url (str): url of fandom browse page
ex: https://www.fanfiction.net/book/Harry-Potter/
Returns:
str: scraped fandom name
"""
options = uc.ChromeOptions()
options.headless = True
options.add_argument('--headless')
chrome = uc.Chrome(options=options)
chrome.get(url)
html = chrome.page_source
soup = BeautifulSoup(html, "lxml")
regex = r" (FanFiction Archive|Crossover) \| FanFiction$"
name = re.sub(regex, '', soup.find('title').string)
time.sleep(5)
chrome.quit()
return name
class FandomData:
"""Make fandom database by each SectionData and write json file.
Attributes:
sections (list): list of SectionData
date (str): ISO 8601 date when json file was written
database (dict): fandom database
overridden by make_database(), make_unified_database(),
and make_exceptional_fandom_database()
"""
def __init__(self, urls: list):
"""Initialize.
Args:
urls (list): fandom url list for SectionData
"""
self.sections = [SectionData(url) for url in urls]
self.date = datetime.now(timezone.utc).isoformat()
self.database = {}
def scrape(self):
"""Scrape all urls of self.sections."""
print("Start scraping...")
options = uc.ChromeOptions()
options.headless = True
options.add_argument('--headless')
chrome = uc.Chrome(options=options)
length = len(self.sections)
for i, section in enumerate(self.sections):
print(section.url)
try:
chrome.get(section.url)
html = chrome.page_source
section.scrape(html)
except Exception as e:
print("Get page source error!")
print(e)
if i != length - 1:
time.sleep(5)
chrome.quit()
def make_database(self):
"""Make self.database which has the same structure of Fanfiction.net."""
print("Make fandom database")
self.database = {'date': self.date}
for section in self.sections:
if section.crossover not in self.database:
self.database[section.crossover] = {}
self.database[section.crossover][section.id] = {}
database = self.database[section.crossover][section.id]
database['name'] = section.name
database['url'] = section.url
database['fandoms'] = section.fandoms
def make_unified_database(self):
"""Make self.database as unified fandom database sorted by fandom['name'] key."""
print("Make unified fandom database")
self.database = {
'date': self.date,
# ! all [] generated by dict.fromkeys(['a', 'b', 'c'], []) have same address
'sections': dict.fromkeys([section.id for section in self.sections])
}
for section in self.sections:
if not isinstance(self.database['sections'][section.id], dict):
self.database['sections'][section.id] = {}
section_dict = self.database['sections'][section.id]
if 'name' not in section_dict:
section_dict['name'] = section.name
section_dict[section.crossover + "_url"] = section.url
fandom_names = [fandom['name']
for section in self.sections for fandom in section.fandoms]
# ! all [] generated by dict.fromkeys(['a', 'b', 'c'], []) have same address
self.database['fandoms'] = dict.fromkeys(sorted(fandom_names))
for section in self.sections:
for fandom in section.fandoms:
if not isinstance(self.database['fandoms'][fandom['name']], list):
self.database['fandoms'][fandom['name']] = []
self.database['fandoms'][fandom['name']].append({
'section_id': section.id,
'crossover': section.crossover == 'crossover',
'url': fandom['url'],
'rough_story_number': fandom['rough_story_number']
})
def make_exceptional_fandom_database(self):
"""Make self.database as crossover fandom database which name contain ' & '."""
print("Make exceptional fandom crossover database")
self.database = {'date': self.date}
exceptional_fandomlist = [
fandom['name'] for section in self.sections for fandom in section.fandoms
if section.crossover == 'crossover' and ' & ' in fandom['name']
]
self.database['fandoms'] = sorted(list(dict.fromkeys(exceptional_fandomlist)))
def write_json_file(self, filename: str):
"""Write self.database as json file by filename.
Args:
filename (str): json filename
"""
print("Write " + filename)
file_path = os.path.dirname(filename)
if not os.path.exists(file_path):
os.makedirs(file_path)
with open(filename, 'w') as json_file:
json.dump(self.database, json_file, indent=4, ensure_ascii=False)
def main():
"""Scrape section_urls and write json files."""
section_urls = [
'https://www.fanfiction.net/anime/',
'https://www.fanfiction.net/book/',
'https://www.fanfiction.net/cartoon/',
'https://www.fanfiction.net/comic/',
'https://www.fanfiction.net/game/',
'https://www.fanfiction.net/misc/',
'https://www.fanfiction.net/play/',
'https://www.fanfiction.net/movie/',
'https://www.fanfiction.net/tv/',
'https://www.fanfiction.net/crossovers/anime/',
'https://www.fanfiction.net/crossovers/book/',
'https://www.fanfiction.net/crossovers/cartoon/',
'https://www.fanfiction.net/crossovers/comic/',
'https://www.fanfiction.net/crossovers/game/',
'https://www.fanfiction.net/crossovers/misc/',
'https://www.fanfiction.net/crossovers/play/',
'https://www.fanfiction.net/crossovers/movie/',
'https://www.fanfiction.net/crossovers/tv/',
]
fandom_data = FandomData(section_urls)
fandom_data.scrape()
json_filename = './json/fandom.json'
unified_json_filename = './json/unified-fandom.json'
exceptional_fandom_json_filename = './json/exceptional-fandom.json'
fandom_data.make_database()
fandom_data.write_json_file(json_filename)
fandom_data.make_unified_database()
fandom_data.write_json_file(unified_json_filename)
fandom_data.make_exceptional_fandom_database()
fandom_data.write_json_file(exceptional_fandom_json_filename)
if __name__ == '__main__':
main()
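# Hedged illustration (not part of the original script): the rough_story_number
# arithmetic used in SectionData.scrape() above, applied to a made-up span text
# such as "(14.5K)". `reduce` is already imported at the top of this module.
def _rough_story_number_demo():
    span_text = "(14.5K)"  # hypothetical div_tag.find('span').text
    str_numbers = span_text[1:-1].replace(',', '').replace('K', ' 1000') \
        .replace('M', ' 1000000').split(' ')
    numbers = [float(i) for i in str_numbers]           # [14.5, 1000.0]
    return int(reduce(lambda x, y: x * y, numbers))     # -> 14500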
| 35.209386
| 96
| 0.574285
|
2d4b74bfd7c3612f473ad08e6a089fb68494c2a3
| 4,120
|
py
|
Python
|
aiida_optimade/mappers/structures.py
|
CasperWA/aiida-optimade
|
9516e1521859efb9962f34841bc82d4e583dda38
|
[
"MIT"
] | null | null | null |
aiida_optimade/mappers/structures.py
|
CasperWA/aiida-optimade
|
9516e1521859efb9962f34841bc82d4e583dda38
|
[
"MIT"
] | 2
|
2020-03-09T14:59:48.000Z
|
2020-03-09T15:00:20.000Z
|
aiida_optimade/mappers/structures.py
|
CasperWA/aiida-optimade
|
9516e1521859efb9962f34841bc82d4e583dda38
|
[
"MIT"
] | null | null | null |
from optimade.models import StructureResourceAttributes
from aiida_optimade.translators import StructureDataTranslator
from .entries import ResourceMapper
__all__ = ("StructureMapper",)
class StructureMapper(ResourceMapper):
"""Map 'structure' resources from OPTiMaDe to AiiDA"""
ENDPOINT = "structures"
ALIASES = (
("id", "id"),
("immutable_id", "uuid"),
("last_modified", "mtime"),
("type", "extras.something.non.existing.type"),
)
TRANSLATOR = StructureDataTranslator
ALL_ATTRIBUTES = list(StructureResourceAttributes.schema().get("properties").keys())
REQUIRED_ATTRIBUTES = StructureResourceAttributes.schema().get("required")
@classmethod
def map_back(cls, entity_properties: dict) -> dict:
"""Map properties from AiiDA to OPTiMaDe
:return: A resource object in OPTiMaDe format
"""
mapping = ((real, alias) for alias, real in cls.all_aliases())
new_object_attributes = {}
new_object = {}
for real, alias in mapping:
if (
real in entity_properties
and entity_properties[real] is not None
and alias not in ["id", "type"]
):
new_object_attributes[alias] = entity_properties[real]
# Particular attributes
# Remove "extras.optimade." prefix from reals to create aliases
reals = []
for field, value in entity_properties.items():
if field.startswith(cls.PROJECT_PREFIX):
if value is None:
continue
reals.append(field)
for real in reals:
alias = real[len(cls.PROJECT_PREFIX) :]
new_object_attributes[alias] = entity_properties[real]
if "id" in entity_properties:
new_object["id"] = entity_properties["id"]
else:
raise KeyError(
f'"id" should be present in entity_properties: {entity_properties}'
)
new_object["attributes"] = cls.build_attributes(
new_object_attributes, new_object["id"]
)
new_object["type"] = cls.ENDPOINT
return new_object
@classmethod
def build_attributes(cls, retrieved_attributes: dict, entry_pk: int) -> dict:
"""Build attributes dictionary for OPTiMaDe structure resource
:param retrieved_attributes: Dict of new attributes, will be updated accordingly
:type retrieved_attributes: dict
:param entry_pk: The AiiDA Node's PK
:type entry_pk: int
"""
import json
res = {}
float_fields_stored_as_strings = {"elements_ratios"}
# Add existing attributes
# TODO: Use sets instead!!
missing_attributes = cls.ALL_ATTRIBUTES.copy()
for existing_attribute, value in retrieved_attributes.items():
if existing_attribute in float_fields_stored_as_strings and value:
value = json.loads(str(value))
res[existing_attribute] = value
if existing_attribute in missing_attributes:
missing_attributes.remove(existing_attribute)
# Create and add new attributes
if missing_attributes:
translator = cls.TRANSLATOR(entry_pk)
for attribute in missing_attributes:
try:
create_attribute = getattr(translator, attribute)
except AttributeError:
if attribute in cls.REQUIRED_ATTRIBUTES:
translator = None
raise NotImplementedError(
f"Parsing required {attribute} from "
f"{cls.TRANSLATOR} has not yet been implemented."
)
# Print warning that parsing non-required attribute has not yet been implemented
else:
res[attribute] = create_attribute()
# Store new attributes in `extras`
translator.store_attributes()
translator = None
return res
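# Hedged sketch (not part of the package): the shape of the transformation that
# map_back() performs. Field names, values and the "extras.optimade." prefix are
# assumptions (PROJECT_PREFIX is defined on the parent ResourceMapper and is not
# shown here); missing required attributes would additionally be computed through
# the TRANSLATOR in build_attributes().
#
#   entity_properties = {
#       "id": 42,
#       "uuid": "0e6f8a8e-0000-0000-0000-000000000000",   # -> attributes["immutable_id"]
#       "mtime": "2020-01-01T00:00:00",                   # -> attributes["last_modified"]
#       "extras.optimade.nelements": 3,                   # prefix stripped -> attributes["nelements"]
#   }
#   resource = StructureMapper.map_back(entity_properties)
#   # resource["type"] == "structures"; resource["attributes"] holds the mapped fields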
| 35.517241
| 100
| 0.601214
|
33fd6f7433872acafbdaa811da4a9e0c21f236d0
| 8,766
|
py
|
Python
|
chapter_01/deep_q_learning.py
|
linklab/link_rl_book_codes
|
b272b46d5ecd2802f34648440ff53641c68cbbf0
|
[
"MIT"
] | null | null | null |
chapter_01/deep_q_learning.py
|
linklab/link_rl_book_codes
|
b272b46d5ecd2802f34648440ff53641c68cbbf0
|
[
"MIT"
] | 7
|
2020-11-13T18:57:32.000Z
|
2022-02-10T01:52:44.000Z
|
chapter_01/deep_q_learning.py
|
linklab/link_rl_book_codes
|
b272b46d5ecd2802f34648440ff53641c68cbbf0
|
[
"MIT"
] | 1
|
2021-09-07T12:41:33.000Z
|
2021-09-07T12:41:33.000Z
|
# https://www.deeplearningwizard.com/deep_learning/deep_reinforcement_learning_pytorch/dynamic_programming_frozenlake/
# -*- coding: utf-8 -*-
import time
import gym
import numpy as np
import matplotlib.pyplot as plt
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import collections
np.set_printoptions(precision=3)
np.set_printoptions(suppress=True)
np.set_printoptions(formatter={'float': '{: 0.3f}'.format})
class Qnet(nn.Module):
def __init__(self):
super(Qnet, self).__init__()
self.fc1 = nn.Linear(4, 128)
self.fc2 = nn.Linear(128, 128)
self.fc3 = nn.Linear(128, 2)
def forward(self, x):
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
def get_action(self, obs, epsilon):
out = self.forward(obs)
coin = random.random()
if coin < epsilon:
return random.randint(0, 1)
else:
return out.argmax().item()
class ReplayMemory:
def __init__(self, buffer_limit=50000):
self.memory = collections.deque(maxlen=buffer_limit)
def put(self, transition):
self.memory.append(transition)
def size(self):
return len(self.memory)
def sample(self, n):
mini_batch = random.sample(self.memory, n)
observation_lst, action_lst, reward_lst, next_observation_lst, done_mask_lst = [], [], [], [], []
for transition in mini_batch:
observation, action, reward, next_observation, done_mask = transition
observation_lst.append(observation)
action_lst.append([action])
reward_lst.append([reward])
next_observation_lst.append(next_observation)
done_mask_lst.append([done_mask])
return torch.tensor(observation_lst, dtype=torch.float), torch.tensor(action_lst), \
torch.tensor(reward_lst), torch.tensor(next_observation_lst, dtype=torch.float), \
torch.tensor(done_mask_lst)
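# A small, self-contained sketch (dummy data) of how the replay buffer above is used:
# transitions are (obs, action, reward, next_obs, done_mask) tuples, and sample()
# stacks a mini-batch of them into tensors.
def _replay_memory_demo():
    demo_memory = ReplayMemory(buffer_limit=100)
    for _ in range(10):
        obs = np.random.rand(4).astype(np.float32)
        next_obs = np.random.rand(4).astype(np.float32)
        demo_memory.put((obs, 0, 1.0, next_obs, 1.0))
    obs_t, act_t, rew_t, next_obs_t, done_t = demo_memory.sample(4)
    # obs_t/next_obs_t: shape (4, 4); act_t, rew_t, done_t: shape (4, 1)
    return obs_t.shape, act_t.shape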
def q_learning(
env, num_episodes=1000, learning_rate=0.0001, gamma=0.99,
epsilon_start=0.2, epsilon_end=0.01, batch_size=32,
train_step_interval=4, target_update_step_interval=100,
print_episode_interval=10
):
q = Qnet()
q_target = Qnet()
q_target.load_state_dict(q.state_dict())
memory = ReplayMemory()
optimizer = optim.Adam(q.parameters(), lr=learning_rate)
episode_reward_list = []
training_steps = 0
last_episode_reward = 0
total_step_idx = 0
for i in range(num_episodes):
        # Linearly decay epsilon from epsilon_start down to epsilon_end
epsilon = max(epsilon_end, epsilon_start - (i / num_episodes))
episode_reward = 0 # cumulative_reward
        # Reset the environment and initialize per-episode variables
observation = env.reset()
#env.render()
        # The deep Q-learning loop for one episode
while True:
total_step_idx += 1
            # Select an action (greedy w.r.t. the Q-values, or random with probability epsilon)
action = q.get_action(torch.from_numpy(observation).float(), epsilon)
            # Take the action and receive next_observation, reward, done, info
next_observation, reward, done, _ = env.step(action)
#env.render()
done_mask = 0.0 if done else 1.0
memory.put(
(observation, action, reward / 100.0, next_observation, done_mask)
)
if memory.size() > 2000 and total_step_idx % train_step_interval == 0:
observation_t, action_t, reward_t, next_observation_t, done_mask_t = memory.sample(batch_size)
q_out = q(observation_t) # q_out.shape: (32, 2)
                q_a = q_out.gather(dim=1, index=action_t)  # q_a.shape: (32, 1)
q_prime_out = q_target(next_observation_t) # q_prime_out.shape: (32, 2)
max_q_prime = q_prime_out.max(dim=1)[0].unsqueeze(dim=-1) # max_q_prime.shape: (32, 1)
target = reward_t + gamma * max_q_prime * done_mask_t
                loss = F.mse_loss(q_a, target.detach())  # detach the TD target so gradients only flow through q
optimizer.zero_grad()
loss.backward()
optimizer.step()
                training_steps += 1  # number of Q-network updates
episode_reward_list.append(last_episode_reward)
if total_step_idx % target_update_step_interval == 0:
q_target.load_state_dict(q.state_dict())
            episode_reward += reward  # accumulating the undiscounted reward here is the appropriate way to compute episode_reward
observation = next_observation
if done:
last_episode_reward = episode_reward
break
if i % print_episode_interval == 0 and i != 0:
print("EPISODE: {0:3d}, EPISODE_REWARD: {1:5.1f}, SIZE_OF_REPLAY_BUFFER: {2:5d}, EPSILON: {3:.3f}".format(
i, episode_reward, memory.size(), epsilon
))
return training_steps, episode_reward_list
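# A standalone sketch of the TD-target computation used inside the training step above,
# with tiny hand-made tensors (batch of 2, 2 actions). The values are illustrative only;
# gamma matches the default used by q_learning.
def _td_target_demo():
    gamma = 0.99
    q_out = torch.tensor([[0.1, 0.5], [0.3, 0.2]])         # Q(s, .) from the online net
    action_t = torch.tensor([[1], [0]])
    q_a = q_out.gather(dim=1, index=action_t)               # Q(s, a), shape (2, 1)
    q_prime_out = torch.tensor([[0.4, 0.6], [0.1, 0.0]])    # Q_target(s', .)
    max_q_prime = q_prime_out.max(dim=1)[0].unsqueeze(-1)   # shape (2, 1)
    reward_t = torch.tensor([[1.0], [0.0]])
    done_mask_t = torch.tensor([[1.0], [0.0]])              # 0.0 marks episode end
    target = reward_t + gamma * max_q_prime * done_mask_t
    loss = F.mse_loss(q_a, target)
    return target, loss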
def q_testing(num_episodes, q):
episode_reward_list = []
for i in range(num_episodes):
episode_reward = 0 # cumulative_reward
        pass  # homework (left as an exercise)
episode_reward_list.append(episode_reward)
return np.average(episode_reward_list), np.std(episode_reward_list)
def main_env_info():
# https://github.com/openai/gym/blob/master/gym/envs/classic_control/cartpole.py
env = gym.make('CartPole-v1')
#####################
# observation space #
#####################
# Observation:
# Type: Box(4)
# Num Observation Min Max
# 0 Cart Position -4.8 4.8
# 1 Cart Velocity -Inf Inf
# 2 Pole Angle -0.418 rad (-24 deg) 0.418 rad (24 deg)
# 3 Pole Angular Velocity -Inf Inf
print("*" * 80)
print(env.observation_space)
# print(env.observation_space.n)
for i in range(10):
print(env.observation_space.sample())
print()
################
# action space #
################
# Actions:
# Type: Discrete(2)
# Num Action
# 0 Push cart to the left
# 1 Push cart to the right
print("*" * 80)
print(env.action_space)
print(env.action_space.n)
for i in range(10):
print(env.action_space.sample(), end=" ")
print()
print("*" * 80)
# Starting State:
# All observations are assigned a uniform random value in [-0.05..0.05]
observation = env.reset()
print(observation)
# Reward:
# Reward is 1 for every step taken, including the termination step
action = 0 # LEFT
next_observation, reward, done, info = env.step(action)
    # CartPole's observation is a continuous 4-dimensional state, and the transition
    # for a given action is deterministic
print("Observation: {0}, Action: {1}, Reward: {2}, Next Observation: {3}, Done: {4}, Info: {5}".format(
observation, action, reward, next_observation, done, info
))
observation = next_observation
action = 1 # RIGHT
next_observation, reward, done, info = env.step(action)
print("Observation: {0}, Action: {1}, Reward: {2}, Next Observation: {3}, Done: {4}, Info: {5}".format(
observation, action, reward, next_observation, done, info
))
print("*" * 80)
    # Reset the environment to a fresh initial state and alternate left/right actions
observation = env.reset()
actions = [0, 1] * 5
for action in actions:
next_observation, reward, done, info = env.step(action)
print("Observation: {0}, Action: {1}, Reward: {2}, Next Observation: {3}, Done: {4}, Info: {5}".format(
observation, action, reward, next_observation, done, info
))
observation = next_observation
env.close()
def main_q_learning():
NUM_EPISODES = 1000
LEARNING_RATE = 0.0001
GAMMA = 0.99
EPSILON_START = 0.2
EPSILON_END = 0.01
BATCH_SIZE = 32
TRAIN_STEP_INTERVAL = 4
TARGET_UPDATE_STEP_INTERVAL = 100
PRINT_EPISODE_INTERVAL = 10
env = gym.make('CartPole-v1')
training_steps, episode_reward_list = q_learning(
env,
NUM_EPISODES, LEARNING_RATE, GAMMA,
EPSILON_START, EPSILON_END, BATCH_SIZE, TRAIN_STEP_INTERVAL,
TARGET_UPDATE_STEP_INTERVAL, PRINT_EPISODE_INTERVAL
)
plt.plot(range(training_steps), episode_reward_list, color="Blue")
plt.xlabel("training steps")
plt.ylabel("episode reward")
plt.show()
if __name__ == "__main__":
main_env_info()
main_q_learning()
| 32.227941
| 118
| 0.606206
|
dfbbe226e5764b136040f65428623511e1877800
| 648
|
py
|
Python
|
app_sys/migrations/0001_initial.py
|
sivarki/hjarnuc
|
4acc9437af0f0fdc44d68dd0d6923e1039a4911b
|
[
"Apache-2.0"
] | null | null | null |
app_sys/migrations/0001_initial.py
|
sivarki/hjarnuc
|
4acc9437af0f0fdc44d68dd0d6923e1039a4911b
|
[
"Apache-2.0"
] | null | null | null |
app_sys/migrations/0001_initial.py
|
sivarki/hjarnuc
|
4acc9437af0f0fdc44d68dd0d6923e1039a4911b
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.1.1 on 2018-12-27 07:43
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='EnvSofeware',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sofeware_name', models.CharField(max_length=128)),
('sofeware_version', models.CharField(max_length=128, null=True)),
('install_script', models.TextField(null=True)),
],
),
]
| 27
| 114
| 0.589506
|
0506849b67adadd8b0f003adca5ec9ca79907b5a
| 20,275
|
py
|
Python
|
src/my_package/todelete/functions/functions_pytorch0.2/FilterInterpolationLayer.py
|
laomao0/AIM_DAIN
|
8322569498d675d3b2c1f35475c1299cad580bde
|
[
"MIT"
] | 3
|
2020-05-08T20:45:57.000Z
|
2021-01-18T11:32:38.000Z
|
src/my_package/todelete/functions/functions_pytorch0.2/FilterInterpolationLayer.py
|
laomao0/AIM_DAIN
|
8322569498d675d3b2c1f35475c1299cad580bde
|
[
"MIT"
] | null | null | null |
src/my_package/todelete/functions/functions_pytorch0.2/FilterInterpolationLayer.py
|
laomao0/AIM_DAIN
|
8322569498d675d3b2c1f35475c1299cad580bde
|
[
"MIT"
] | null | null | null |
# this is for wrapping the customized layer
import torch
from torch.autograd import Function
import _ext.my_lib as my_lib
#Please check how the STN FUNCTION is written :
#https://github.com/fxia22/stn.pytorch/blob/master/script/functions/gridgen.py
#https://github.com/fxia22/stn.pytorch/blob/master/script/functions/stn.py
class FilterInterpolationLayer(Function):
def __init__(self):
super(FilterInterpolationLayer,self).__init__()
def forward(self, input1,input2,input3):
# assert(input1.is_contiguous())
# assert(input2.is_contiguous())
self.input1 = input1.contiguous() # need to use in the backward process, so we need to cache it
self.input2 = input2.contiguous() # TODO: Note that this is simply a shallow copy?
self.input3 = input3.contiguous()
# if input1.is_cuda:
# self.device = torch.cuda.current_device()
# else:
# self.device = -1
# output = torch.zeros(input1.size())
if input1.is_cuda :
# output = output.cuda()
output = torch.cuda.FloatTensor().resize_(input1.size()).zero_()
my_lib.FilterInterpolationLayer_gpu_forward(input1, input2, input3, output)
else:
            # Allocate the output buffer on the CPU before calling into the extension
            output = torch.FloatTensor().resize_(input1.size()).zero_()
            my_lib.FilterInterpolationLayer_cpu_forward(input1, input2, input3, output)
# the function returns the output to its caller
return output
#TODO: if there are multiple outputs of this function, then the order should be well considered?
def backward(self, gradoutput):
# print("Backward of Filter Interpolation Layer")
# gradinput1 = input1.new().zero_()
# gradinput2 = input2.new().zero_()
# gradinput1 = torch.zeros(self.input1.size())
# gradinput2 = torch.zeros(self.input2.size())
# gradinput3 = torch.zeros(self.input3.size())
        # Allocate gradient buffers on the same device as the cached inputs
        tensor_type = torch.cuda.FloatTensor if self.input1.is_cuda else torch.FloatTensor
        gradinput1 = tensor_type().resize_(self.input1.size()).zero_()
        gradinput2 = tensor_type().resize_(self.input2.size()).zero_()
        gradinput3 = tensor_type().resize_(self.input3.size()).zero_()
if self.input1.is_cuda:
# print("CUDA backward")
# gradinput1 = gradinput1.cuda(self.device)
# gradinput2 = gradinput2.cuda(self.device)
# gradinput3 = gradinput3.cuda(self.device)
err = my_lib.FilterInterpolationLayer_gpu_backward(self.input1,self.input2,self.input3, gradoutput, gradinput1, gradinput2, gradinput3)
if err != 0 :
print(err)
else:
# print("CPU backward")
# print(gradoutput)
err = my_lib.FilterInterpolationLayer_cpu_backward(self.input1, self.input2,self.input3, gradoutput, gradinput1, gradinput2, gradinput3)
# print(err)
if err != 0 :
print(err)
# print(gradinput1)
# print(gradinput2)
# print(gradinput1)
return gradinput1, gradinput2,gradinput3
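# The Function above is written in the legacy, stateful torch.autograd.Function style
# (inputs cached on `self`, gradient buffers allocated by hand). As a point of reference,
# here is a toy example of the same caching pattern in the modern static-method style;
# it is NOT the real filter-interpolation kernel, only an illustration of the API shape.
class _ScaleByFlowNorm(torch.autograd.Function):
    @staticmethod
    def forward(ctx, image, flow):
        # ctx.save_for_backward replaces the `self.input1 = input1.contiguous()` caching
        ctx.save_for_backward(image, flow)
        return image * flow.norm()
    @staticmethod
    def backward(ctx, grad_output):
        image, flow = ctx.saved_tensors
        norm = flow.norm().clamp(min=1e-12)
        grad_image = grad_output * norm
        grad_flow = (grad_output * image).sum() * flow / norm
        return grad_image, grad_flow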
# calculate the weights of flow
class WeightLayer(Function):
def __init__(self, lambda_e = 10.0/255.0, lambda_v = 1.0, Nw = 3):
#lambda_e = 10.0 , lambda_v = 1.0, Nw = 3,
super(WeightLayer,self).__init__()
self.lambda_e = lambda_e
self.lambda_v = lambda_v
self.Nw = Nw
# flow1_grad
def forward(self, input1,input2,input3):
# assert(input1.is_contiguous())
# assert(input2.is_contiguous())
self.input1 = input1.contiguous() # ref1 image
self.input2 = input2.contiguous() # ref2 image
self.input3 = input3.contiguous()
# self.flow1_grad = flow1_grad.contiguous() # ref1 flow's grad
if input1.is_cuda:
self.device = torch.cuda.current_device()
else:
self.device = -1
output = torch.zeros(input1.size(0), 1 , input1.size(2), input1.size(3))
if input1.is_cuda :
output = output.cuda()
err = my_lib.WeightLayer_gpu_forward(input1, input2, input3,
# flow1_grad,
output,
self.lambda_e, self.lambda_v, self.Nw
)
if err != 0 :
print(err)
else:
# output = torch.cuda.FloatTensor(input1.data.size())
err = my_lib.WeightLayer_cpu_forward(input1, input2, input3, output,
self.lambda_e , self.lambda_v, self.Nw
)
if err != 0 :
print(err)
self.output = output # save this for fast back propagation
# the function returns the output to its caller
return output
#TODO: if there are multiple outputs of this function, then the order should be well considered?
def backward(self, gradoutput):
# print("Backward of WeightLayer Layer")
# gradinput1 = input1.new().zero_()
# gradinput2 = input2.new().zero_()
gradinput1 = torch.zeros(self.input1.size())
gradinput2 = torch.zeros(self.input2.size())
gradinput3 = torch.zeros(self.input3.size())
# gradflow1_grad = torch.zeros(self.flow1_grad.size())
if self.input1.is_cuda:
#print("CUDA backward")
gradinput1 = gradinput1.cuda(self.device)
gradinput2 = gradinput2.cuda(self.device)
gradinput3 = gradinput3.cuda(self.device)
# gradflow1_grad = gradflow1_grad.cuda(self.device)
err = my_lib.WeightLayer_gpu_backward(
self.input1,self.input2,self.input3, self.output,
gradoutput,
gradinput1, gradinput2, gradinput3,
self.lambda_e, self.lambda_v, self.Nw
)
if err != 0 :
print(err)
else:
#print("CPU backward")
# print(gradoutput)
err = my_lib.WeightLayer_cpu_backward(
self.input1, self.input2,self.input3, self.output,
gradoutput,
gradinput1, gradinput2, gradinput3,
self.lambda_e, self.lambda_v, self.Nw
)
# print(err)
if err != 0 :
print(err)
# print(gradinput1)
# print(gradinput2)
# print("from 1:")
# print(gradinput3[0,0,...])
return gradinput1, gradinput2, gradinput3
class PixelValueLayer(Function):
def __init__(self, sigma_d = 3, tao_r = 0.05, Prowindow = 2 ):
super(PixelValueLayer,self).__init__()
self.sigma_d = sigma_d
self.tao_r = tao_r #maybe not useable
self.Prowindow = Prowindow
def forward(self, input1, input3, flow_weights):
# assert(input1.is_contiguous())
# assert(input2.is_contiguous())
self.input1 = input1.contiguous() # ref1 image
#self.input2 = input2.contiguous() # ref2 image
self.input3 = input3.contiguous() # ref1 flow
self.flow_weights = flow_weights.contiguous() # ref1 flow weights
if input1.is_cuda:
self.device = torch.cuda.current_device()
else:
self.device = -1
output = torch.zeros(input1.size())
if input1.is_cuda:
output = output.cuda()
err = my_lib.PixelValueLayer_gpu_forward(
input1, input3, flow_weights, output,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
else:
# output = torch.cuda.FloatTensor(input1.data.size())
err = my_lib.PixelValueLayer_cpu_forward(
input1, input3, flow_weights, output,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
# the function returns the output to its caller
return output
#TODO: if there are multiple outputs of this function, then the order should be well considered?
def backward(self, gradoutput):
# print("Backward of PixelValueLayer Layer")
# gradinput1 = input1.new().zero_()
# gradinput2 = input2.new().zero_()
gradinput1 = torch.zeros(self.input1.size())
#gradinput2 = torch.zeros(self.input2.size())
gradinput3 = torch.zeros(self.input3.size())
gradflow_weights = torch.zeros(self.flow_weights.size())
if self.input1.is_cuda:
# print("CUDA backward")
gradinput1 = gradinput1.cuda(self.device)
#gradinput2 = gradinput2.cuda(self.device)
gradinput3 = gradinput3.cuda(self.device)
gradflow_weights = gradflow_weights.cuda(self.device)
err = my_lib.PixelValueLayer_gpu_backward(
self.input1,self.input3, self.flow_weights,
gradoutput,
gradinput1, gradinput3, gradflow_weights,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
else:
#print("CPU backward")
# print(gradoutput)
err = my_lib.PixelValueLayer_cpu_backward(
self.input1, self.input3, self.flow_weights,
gradoutput,
gradinput1, gradinput3, gradflow_weights,
self.sigma_d, self.tao_r , self.Prowindow
)
# print(err)
if err != 0 :
print(err)
# print(gradinput1)
# print(gradinput2)
# print("from 2:")
# print(gradinput3[0,0,...])
# print("Image grad:")
# print(gradinput1[0,:,:4,:4])
# print("Flow grad:")
# print(gradinput3[0,:,:4,:4])
# print("Flow_weights grad:")
# print(gradflow_weights[0,:,:4,:4])
return gradinput1, gradinput3, gradflow_weights
class PixelWeightLayer(Function):
def __init__(self,threshhold, sigma_d =3, tao_r =0.05, Prowindow = 2 ):
super(PixelWeightLayer,self).__init__()
self.threshhold = threshhold
self.sigma_d = sigma_d
self.tao_r = tao_r #maybe not useable
self.Prowindow = Prowindow
def forward(self, input3, flow_weights):
# assert(input1.is_contiguous())
# assert(input2.is_contiguous())
#self.input1 = input1.contiguous() # ref1 image
#self.input2 = input2.contiguous() # ref2 image
self.input3 = input3.contiguous() # ref1 flow
self.flow_weights = flow_weights.contiguous() # ref1 flow weights
if input3.is_cuda:
self.device = torch.cuda.current_device()
else:
self.device = -1
output = torch.zeros([input3.size(0), 1, input3.size(2), input3.size(3)])
if input3.is_cuda :
output = output.cuda()
err = my_lib.PixelWeightLayer_gpu_forward(
input3, flow_weights, output,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
else:
# output = torch.cuda.FloatTensor(input1.data.size())
err = my_lib.PixelWeightLayer_cpu_forward(
input3, flow_weights, output,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
self.output = output
# the function returns the output to its caller
return output
#TODO: if there are multiple outputs of this function, then the order should be well considered?
def backward(self, gradoutput):
# print("Backward of PixelWeightLayer Layer")
# gradinput1 = input1.new().zero_()
# gradinput2 = input2.new().zero_()
#gradinput1 = torch.zeros(self.input1.size())
#gradinput2 = torch.zeros(self.input2.size())
gradinput3 = torch.zeros(self.input3.size())
gradflow_weights = torch.zeros(self.flow_weights.size())
if self.input3.is_cuda:
# print("CUDA backward")
#gradinput1 = gradinput1.cuda(self.device)
#gradinput2 = gradinput2.cuda(self.device)
gradinput3 = gradinput3.cuda(self.device)
gradflow_weights = gradflow_weights.cuda(self.device)
err = my_lib.PixelWeightLayer_gpu_backward(
self.input3, self.flow_weights, self.output,
gradoutput,
gradinput3, gradflow_weights,
self.threshhold,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
else:
# print("CPU backward")
# print(gradoutput)
err = my_lib.PixelWeightLayer_cpu_backward(
self.input3, self.flow_weights, self.output,
gradoutput,
gradinput3, gradflow_weights,
self.threshhold,
self.sigma_d, self.tao_r , self.Prowindow
)
# print(err)
if err != 0 :
print(err)
# print(gradinput1)
# print(gradinput2)
# print("from 3:")
# print(gradinput3[0,0,...])
return gradinput3, gradflow_weights
#class ReliableValueLayer(Function):
# def __init__(self, Nw =3, tao_r =0.05, Prowindow = 2 ):
# super(ReliableValueLayer,self).__init__()
#
# self.Nw = Nw
# self.tao_r = tao_r #maybe not useable
# self.Prowindow = Prowindow
#
# def forward(self, input3, flow_weight1):
#
# # assert(input1.is_contiguous())
# # assert(input2.is_contiguous())
# #self.input1 = input1.contiguous() # ref1 image
# #self.input2 = input2.contiguous() # ref2 image
# self.input3 = input3.contiguous() # ref1 flow
# self.flow_weight1 = flow_weight1.contiguous() # ref1 flow weights
#
# if input3.is_cuda:
# self.device = torch.cuda.current_device()
# else:
# self.device = -1
#
# output = torch.zeros([intpu3.size(0), 1, input3.size(2), input3.size(3)])
# #output2 = torch.zeros(input1.size())
# #weight1 = torch.zeros(input1.size())
# #weight2 = torch.zeros(input1.size())
#
#
# if input1.is_cuda :
# output = output.cuda()
# my_lib.ReliableValueLayer_gpu_forward(
# input3, flow_weight1, output,
# self.sigma_d, self.tao_r , self.Prowindow )
# else:
# # output = torch.cuda.FloatTensor(input1.data.size())
# my_lib.ReliableValueLayer_cpu_forward(
# input3, flow_weight1, output,
# self.sigma_d, self.tao_r , self.Prowindow )
#
# # the function returns the output to its caller
# return output
#
# #TODO: if there are multiple outputs of this function, then the order should be well considered?
# def backward(self, gradoutput):
# # print("Backward of Filter Interpolation Layer")
# # gradinput1 = input1.new().zero_()
# # gradinput2 = input2.new().zero_()
# #gradinput1 = torch.zeros(self.input1.size())
# #gradinput2 = torch.zeros(self.input2.size())
# gradinput3 = torch.zeros(self.input3.size())
# gradflow_weight1 = torch.zeros(self.flow_weight1.size())
#
# if self.input1.is_cuda:
# # print("CUDA backward")
# #gradinput1 = gradinput1.cuda(self.device)
# #gradinput2 = gradinput2.cuda(self.device)
# gradinput3 = gradinput3.cuda(self.device)
# gradflow_weight1 = gradflow_weight1.cuda(self.device)
#
# err = my_lib.ReliableValueLayer_gpu_backward(
# self.input3, self.flow_weight1, gradoutput,
# gradinput3, gradflow_weight1,
# self.sigma_d, self.tao_r , self.Prowindow )
# if err != 0 :
# print(err)
#
# else:
# # print("CPU backward")
# # print(gradoutput)
# err = my_lib.ReliableValueLayer_cpu_backward(
# self.input3,self.flow_weight1, gradoutput,
# gradinput3, gradflow_weight1,
# self.sigma_d, self.tao_r , self.Prowindow )
# # print(err)
# if err != 0 :
# print(err)
# # print(gradinput1)
# # print(gradinput2)
#
# # print(gradinput1)
#
# return gradinput3,gradflow_weight1
class ReliableWeightLayer(Function):
def __init__(self, threshhold, sigma_d =3, tao_r =0.05, Prowindow = 2 ):
super(ReliableWeightLayer,self).__init__()
self.threshhold = threshhold
self.sigma_d = sigma_d
self.tao_r = tao_r #maybe not useable
self.Prowindow = Prowindow
def forward(self, input3):
# assert(input1.is_contiguous())
# assert(input2.is_contiguous())
#self.input1 = input1.contiguous() # ref1 image
#self.input2 = input2.contiguous() # ref2 image
self.input3 = input3.contiguous() # ref1 flow
#self.flow_weight1 = flow_weight1.contiguous() # ref1 flow weights
if input3.is_cuda:
self.device = torch.cuda.current_device()
else:
self.device = -1
output = torch.zeros([input3.size(0), 1, input3.size(2), input3.size(3)] )
#output2 = torch.zeros(input1.size())
#weight1 = torch.zeros(input1.size())
#weight2 = torch.zeros(input1.size())
if input3.is_cuda :
output = output.cuda()
err = my_lib.ReliableWeightLayer_gpu_forward(
input3, output,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
else:
# output = torch.cuda.FloatTensor(input1.data.size())
err = my_lib.ReliableWeightLayer_cpu_forward(
input3, output,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
        self.output = output  # used for inhibiting some unreliable gradients
# the function returns the output to its caller
return output
#TODO: if there are multiple outputs of this function, then the order should be well considered?
def backward(self, gradoutput):
#print("Backward of ReliableWeightLayer Layer")
# gradinput1 = input1.new().zero_()
# gradinput2 = input2.new().zero_()
#gradinput1 = torch.zeros(self.input1.size())
#gradinput2 = torch.zeros(self.input2.size())
gradinput3 = torch.zeros(self.input3.size())
#gradflow_weight1 = torch.zeros(self.flow_weight1.size())
if self.input3.is_cuda:
#print("CUDA backward")
#gradinput1 = gradinput1.cuda(self.device)
#gradinput2 = gradinput2.cuda(self.device)
gradinput3 = gradinput3.cuda(self.device)
#gradflow_weight1 = gradflow_weight1.cuda(self.device)
err = my_lib.ReliableWeightLayer_gpu_backward(
self.input3, self.output,
gradoutput,
gradinput3,
self.threshhold,
self.sigma_d, self.tao_r , self.Prowindow
)
if err != 0 :
print(err)
else:
# print("CPU backward")
# print(gradoutput)
err = my_lib.ReliableWeightLayer_cpu_backward(
self.input3, self.output,
gradoutput,
gradinput3,
self.threshhold,
self.sigma_d, self.tao_r , self.Prowindow
)
# print(err)
if err != 0 :
print(err)
# print(gradinput1)
# print(gradinput2)
# print("from 4:")
# print(gradinput3[0,0,...])
return gradinput3
| 38.254717
| 148
| 0.56656
|
75913b8e7103233097e3bfbbe5ffc4e83c2085d4
| 3,521
|
py
|
Python
|
imix/models/vqa_models/vilbert/task_utils.py
|
linxi1158/iMIX
|
af87a17275f02c94932bb2e29f132a84db812002
|
[
"Apache-2.0"
] | 23
|
2021-06-26T08:45:19.000Z
|
2022-03-02T02:13:33.000Z
|
imix/models/vqa_models/vilbert/task_utils.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | null | null | null |
imix/models/vqa_models/vilbert/task_utils.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | 9
|
2021-06-10T02:36:20.000Z
|
2021-11-09T02:18:16.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import torch
from torch.utils.data import Dataset
from transformers.tokenization_bert import BertTokenizer
from .datasets import DatasetMapTrain
from .datasets._image_features_reader import ImageFeaturesH5Reader
from imix.data.builder import DATASETS
logger = logging.getLogger(__name__)
@DATASETS.register_module()
class LoadDatasets(Dataset):
def __init__(self, reader):
task_cfg = reader
tokenizer = BertTokenizer.from_pretrained(task_cfg.bert_model, do_lower_case=task_cfg.do_lower_case)
task_feature_reader1 = {}
task_feature_reader2 = {}
self.task = []
self._limit_sample_nums = task_cfg.get('limit_nums', None)
is_train = task_cfg.get('is_train', False)
ids = task_cfg.tasks.split('-')
for i, task_id in enumerate(ids):
task = 'TASK' + task_id
self.task.append(task)
cfg = task_cfg.TASKS[task]
if cfg.features_h5path1 not in task_feature_reader1:
task_feature_reader1[cfg.features_h5path1] = None
if cfg.features_h5path2 not in task_feature_reader2:
task_feature_reader2[cfg.features_h5path2] = None
        # initialize the feature readers
for features_h5path in task_feature_reader1.keys():
if features_h5path != '':
task_feature_reader1[features_h5path] = ImageFeaturesH5Reader(features_h5path, task_cfg.in_memory)
for features_h5path in task_feature_reader2.keys():
if features_h5path != '':
task_feature_reader2[features_h5path] = ImageFeaturesH5Reader(features_h5path, task_cfg.in_memory)
self.task_datasets = {}
# only one task now
for i, task_id in enumerate(ids):
task = 'TASK' + task_id
cfg = task_cfg.TASKS[task]
task_name = cfg.name
if is_train:
split = cfg.train_split
annotations_jsonpath = cfg.train_annotations_jsonpath
else:
split = cfg.val_split
annotations_jsonpath = cfg.val_annotations_jsonpath
self.task_datasets[task] = DatasetMapTrain[task_name](
task=cfg.name,
dataroot=cfg.dataroot,
annotations_jsonpath=annotations_jsonpath,
split=split,
image_features_reader=task_feature_reader1[cfg.features_h5path1],
gt_image_features_reader=task_feature_reader2[cfg.features_h5path2],
tokenizer=tokenizer,
bert_model=task_cfg.bert_model,
clean_datasets=task_cfg.clean_datasets,
padding_index=0,
max_seq_length=cfg.max_seq_length,
max_region_num=cfg.max_region_num)
# limit_nums=self._limit_sample_nums)
def __len__(self):
# only one task now
return self.task_datasets[self.task[0]].__len__()
def __getitem__(self, item: int):
# only one task now
return self.task_datasets[self.task[0]].__getitem__(item)
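# A sketch of the configuration LoadDatasets appears to expect (field names are taken
# from the accesses in the constructor above; concrete values come from the imix config
# system). `reader` is read with attribute access (reader.bert_model, reader.TASKS[task])
# as well as .get(), so it is presumably an mmcv/addict-style config object rather than a
# plain dict. It carries: bert_model, do_lower_case, tasks (e.g. '1' -> 'TASK1'),
# in_memory, is_train, limit_nums, clean_datasets, and a TASKS mapping whose entries
# provide name, dataroot, features_h5path1, features_h5path2, train_split/val_split,
# train_annotations_jsonpath/val_annotations_jsonpath, max_seq_length and max_region_num.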
def compute_score_with_logits(logits, labels):
logits = torch.max(logits, 1)[1].data # argmax
one_hots = torch.zeros(*labels.size()).cuda()
one_hots.scatter_(1, logits.view(-1, 1), 1)
scores = one_hots * labels
return scores
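# A small CPU sketch of the scoring above; compute_score_with_logits hard-codes .cuda(),
# so this demo re-implements the same steps on CPU with toy tensors (batch of 2,
# 3 answer classes, VQA-style soft labels):
def _compute_score_demo():
    logits = torch.tensor([[0.1, 2.0, 0.3], [1.5, 0.2, 0.1]])
    labels = torch.tensor([[0.0, 0.6, 0.4], [0.0, 1.0, 0.0]])
    pred = torch.max(logits, 1)[1]              # argmax: [1, 0]
    one_hots = torch.zeros_like(labels)
    one_hots.scatter_(1, pred.view(-1, 1), 1)
    scores = one_hots * labels                  # [[0, 0.6, 0], [0, 0, 0]]
    return scores.sum()                         # 0.6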
| 37.860215
| 114
| 0.654928
|
c3fca418cbf0abd11fc043409da6aff123a9ac45
| 3,105
|
py
|
Python
|
original/BIOMD0000000633/validation.py
|
sys-bio/temp-biomodels
|
596eebb590d72e74419773f4e9b829a62d7fff9a
|
[
"CC0-1.0"
] | null | null | null |
original/BIOMD0000000633/validation.py
|
sys-bio/temp-biomodels
|
596eebb590d72e74419773f4e9b829a62d7fff9a
|
[
"CC0-1.0"
] | 5
|
2022-03-30T21:33:45.000Z
|
2022-03-31T20:08:15.000Z
|
original/BIOMD0000000633/validation.py
|
sys-bio/temp-biomodels
|
596eebb590d72e74419773f4e9b829a62d7fff9a
|
[
"CC0-1.0"
] | null | null | null |
import matplotlib.pyplot as plt, numpy as np
from SloppyCell.ReactionNetworks import *
# Corresponds to fed liver
net = IO.from_SBML_file('MODEL1603030000_original.xml')
normal = net.copy('normal')
fasted = net.copy('fasted')
adjustments_normal = {'vmax_fbp1':1.00, 'vmax_fbp2':0.66, 'v0_gk':0.48,
'vmax_glct_er':1.00, 'v0_gp':0.89, 'vmax_g6p_er':1.41,
'vmax_g6pt_er':1.41, 'v0_gs':1.00, 'vmax_PC':1.25,
'vmax_pepck':1.41, 'vmax_pepck_mito':1.41,
'vmax_pfk1':0.76, 'vmax_pfk2':0.66, 'vmax_pk':0.58}
for var, adj in adjustments_normal.items():
normal.set_var_ic(var, net.get_var_ic(var)*adj)
adjustments_fasted = {'vmax_fbp1':1.00, 'vmax_fbp2':0.43, 'v0_gk':0.23,
'vmax_glct_er':1.00, 'v0_gp':0.80, 'vmax_g6p_er':2.00,
'vmax_g6pt_er':2.00, 'v0_gs':1.00, 'vmax_PC':1.56,
'vmax_pepck':2.00, 'vmax_pepck_mito':2.00,
'vmax_pfk1':0.57, 'vmax_pfk2':0.43, 'vmax_pk':0.33}
for var, adj in adjustments_fasted.items():
fasted.set_var_ic(var, net.get_var_ic(var)*adj)
traj = Dynamics.integrate(net, [0, 24*60])
traj_normal = Dynamics.integrate(normal, [0, 24*60])
monitored = ['dhap', 'fru6p', 'glc1p', 'glc6p', 'lac', 'mal', 'mal_mito', 'oa', 'pep',
'pg2', 'pg3', 'pyr']
# Also pyr_mito, pep_mito, oa_mito, mal_mito, glc6p_er
vals, fluxes = [], []
vals_normal, fluxes_normal = [], []
vals_fasted, fluxes_fasted = [], []
glc_ext_l = np.linspace(3,12,10)
for glc_ext in glc_ext_l:
net.set_var_ic('glc_ext', glc_ext)
traj = Dynamics.integrate(net, [0, 24*100])
these_vals = [traj.get_var_val_index(var, -1) for var in monitored]
vals.append(these_vals)
fluxes.append(traj.get_var_val_index('v_GLUT2', -1))
normal.set_var_ic('glc_ext', glc_ext)
traj_normal = Dynamics.integrate(normal, [0, 24*100])
these_vals = [traj_normal.get_var_val_index(var, -1) for var in monitored]
vals_normal.append(these_vals)
fluxes_normal.append(traj_normal.get_var_val_index('v_GLUT2', -1))
fasted.set_var_ic('glc_ext', glc_ext)
traj_fasted = Dynamics.integrate(fasted, [0, 24*100])
these_vals = [traj_fasted.get_var_val_index(var, -1) for var in monitored]
vals_fasted.append(these_vals)
fluxes_fasted.append(traj_fasted.get_var_val_index('v_GLUT2', -1))
fig = plt.figure(4353)
fig.clear()
ax = fig.add_subplot(1,1,1)
for these_vals in vals:
ax.plot(these_vals, 'o')
ax.set_xticks(range(len(monitored)))
ax.set_xticklabels(monitored, rotation=90)
ax.set_ylim(0, 3.5)
ax.set_xlim(-0.5, len(monitored) + 0.5)
fig = plt.figure(4360)
fig.clear()
ax = fig.add_subplot(1,1,1)
ax.plot(glc_ext_l, fluxes, '-ok')
ax.plot(glc_ext_l, fluxes_normal, '-ok')
ax.plot(glc_ext_l, fluxes_fasted, '-ok')
ax.set_xlim(3,12)
ax.set_ylim(-100, 250)
ax.axhspan(-100, 0, color='lightblue')
ax.axhspan(0, 250, color='pink')
ax.set_xlabel('Plasma glucose [mM]')
ax.set_ylabel('Glc exchange rate [umol/g/h]')
fig.suptitle('Figure 6', fontsize='x-large')
fig.savefig('validation1.png')
plt.show()
| 38.333333
| 86
| 0.663446
|
7f41b0a803b1b054f8ac7effac4006914ffa79f8
| 773,763
|
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_09_01/models/_models.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_09_01/models/_models.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 4
|
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_09_01/models/_models.py
|
vbarbaresi/azure-sdk-for-python
|
397ba46c51d001ff89c66b170f5576cf8f49c05f
|
[
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.core.exceptions import HttpResponseError
import msrest.serialization
class AadAuthenticationParameters(msrest.serialization.Model):
"""AAD Vpn authentication type related parameters.
:param aad_tenant: AAD Vpn authentication parameter AAD tenant.
:type aad_tenant: str
:param aad_audience: AAD Vpn authentication parameter AAD audience.
:type aad_audience: str
:param aad_issuer: AAD Vpn authentication parameter AAD issuer.
:type aad_issuer: str
"""
_attribute_map = {
'aad_tenant': {'key': 'aadTenant', 'type': 'str'},
'aad_audience': {'key': 'aadAudience', 'type': 'str'},
'aad_issuer': {'key': 'aadIssuer', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AadAuthenticationParameters, self).__init__(**kwargs)
self.aad_tenant = kwargs.get('aad_tenant', None)
self.aad_audience = kwargs.get('aad_audience', None)
self.aad_issuer = kwargs.get('aad_issuer', None)
class AddressSpace(msrest.serialization.Model):
"""AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network.
:param address_prefixes: A list of address blocks reserved for this virtual network in CIDR
notation.
:type address_prefixes: list[str]
"""
_attribute_map = {
'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AddressSpace, self).__init__(**kwargs)
self.address_prefixes = kwargs.get('address_prefixes', None)
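# A minimal usage sketch (the CIDR value is illustrative): the generated models are
# constructed via keyword arguments and can be converted to their wire format with
# msrest's Model.serialize().
def _address_space_demo():
    space = AddressSpace(address_prefixes=['10.0.0.0/16'])
    # Expected wire shape: {'addressPrefixes': ['10.0.0.0/16']}
    return space.serialize()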
class Resource(msrest.serialization.Model):
"""Common resource representation.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = None
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
class ApplicationGateway(Resource):
"""Application gateway resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param zones: A list of availability zones denoting where the resource needs to come from.
:type zones: list[str]
:param identity: The identity of the application gateway, if configured.
:type identity: ~azure.mgmt.network.v2019_09_01.models.ManagedServiceIdentity
:param sku: SKU of the application gateway resource.
:type sku: ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySku
:param ssl_policy: SSL policy of the application gateway resource.
:type ssl_policy: ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslPolicy
:ivar operational_state: Operational state of the application gateway resource. Possible values
include: "Stopped", "Starting", "Running", "Stopping".
:vartype operational_state: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayOperationalState
:param gateway_ip_configurations: Subnets of the application gateway resource. For default
limits, see `Application Gateway limits <https://docs.microsoft.com/azure/azure-subscription-
service-limits#application-gateway-limits>`_.
:type gateway_ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayIPConfiguration]
:param authentication_certificates: Authentication certificates of the application gateway
resource. For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-
limits>`_.
:type authentication_certificates:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayAuthenticationCertificate]
:param trusted_root_certificates: Trusted Root certificates of the application gateway
resource. For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-
limits>`_.
:type trusted_root_certificates:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayTrustedRootCertificate]
:param ssl_certificates: SSL certificates of the application gateway resource. For default
limits, see `Application Gateway limits <https://docs.microsoft.com/azure/azure-subscription-
service-limits#application-gateway-limits>`_.
:type ssl_certificates:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslCertificate]
:param frontend_ip_configurations: Frontend IP addresses of the application gateway resource.
For default limits, see `Application Gateway limits <https://docs.microsoft.com/azure/azure-
subscription-service-limits#application-gateway-limits>`_.
:type frontend_ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFrontendIPConfiguration]
:param frontend_ports: Frontend ports of the application gateway resource. For default limits,
see `Application Gateway limits <https://docs.microsoft.com/azure/azure-subscription-service-
limits#application-gateway-limits>`_.
:type frontend_ports:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFrontendPort]
:param probes: Probes of the application gateway resource.
:type probes: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayProbe]
:param backend_address_pools: Backend address pool of the application gateway resource. For
default limits, see `Application Gateway limits <https://docs.microsoft.com/azure/azure-
subscription-service-limits#application-gateway-limits>`_.
:type backend_address_pools:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendAddressPool]
:param backend_http_settings_collection: Backend http settings of the application gateway
resource. For default limits, see `Application Gateway limits
<https://docs.microsoft.com/azure/azure-subscription-service-limits#application-gateway-
limits>`_.
:type backend_http_settings_collection:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendHttpSettings]
:param http_listeners: Http listeners of the application gateway resource. For default limits,
see `Application Gateway limits <https://docs.microsoft.com/azure/azure-subscription-service-
limits#application-gateway-limits>`_.
:type http_listeners:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayHttpListener]
:param url_path_maps: URL path map of the application gateway resource. For default limits, see
`Application Gateway limits <https://docs.microsoft.com/azure/azure-subscription-service-
limits#application-gateway-limits>`_.
:type url_path_maps: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayUrlPathMap]
:param request_routing_rules: Request routing rules of the application gateway resource.
:type request_routing_rules:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRequestRoutingRule]
:param rewrite_rule_sets: Rewrite rules for the application gateway resource.
:type rewrite_rule_sets:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRewriteRuleSet]
:param redirect_configurations: Redirect configurations of the application gateway resource.
For default limits, see `Application Gateway limits <https://docs.microsoft.com/azure/azure-
subscription-service-limits#application-gateway-limits>`_.
:type redirect_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRedirectConfiguration]
:param web_application_firewall_configuration: Web application firewall configuration.
:type web_application_firewall_configuration:
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayWebApplicationFirewallConfiguration
:param firewall_policy: Reference of the FirewallPolicy resource.
:type firewall_policy: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param enable_http2: Whether HTTP2 is enabled on the application gateway resource.
:type enable_http2: bool
:param enable_fips: Whether FIPS is enabled on the application gateway resource.
:type enable_fips: bool
:param autoscale_configuration: Autoscale Configuration.
:type autoscale_configuration:
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayAutoscaleConfiguration
:ivar resource_guid: The resource GUID property of the application gateway resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the application gateway resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param custom_error_configurations: Custom error configurations of the application gateway
resource.
:type custom_error_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayCustomError]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'operational_state': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'sku': {'key': 'properties.sku', 'type': 'ApplicationGatewaySku'},
'ssl_policy': {'key': 'properties.sslPolicy', 'type': 'ApplicationGatewaySslPolicy'},
'operational_state': {'key': 'properties.operationalState', 'type': 'str'},
'gateway_ip_configurations': {'key': 'properties.gatewayIPConfigurations', 'type': '[ApplicationGatewayIPConfiguration]'},
'authentication_certificates': {'key': 'properties.authenticationCertificates', 'type': '[ApplicationGatewayAuthenticationCertificate]'},
'trusted_root_certificates': {'key': 'properties.trustedRootCertificates', 'type': '[ApplicationGatewayTrustedRootCertificate]'},
'ssl_certificates': {'key': 'properties.sslCertificates', 'type': '[ApplicationGatewaySslCertificate]'},
'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[ApplicationGatewayFrontendIPConfiguration]'},
'frontend_ports': {'key': 'properties.frontendPorts', 'type': '[ApplicationGatewayFrontendPort]'},
'probes': {'key': 'properties.probes', 'type': '[ApplicationGatewayProbe]'},
'backend_address_pools': {'key': 'properties.backendAddressPools', 'type': '[ApplicationGatewayBackendAddressPool]'},
'backend_http_settings_collection': {'key': 'properties.backendHttpSettingsCollection', 'type': '[ApplicationGatewayBackendHttpSettings]'},
'http_listeners': {'key': 'properties.httpListeners', 'type': '[ApplicationGatewayHttpListener]'},
'url_path_maps': {'key': 'properties.urlPathMaps', 'type': '[ApplicationGatewayUrlPathMap]'},
'request_routing_rules': {'key': 'properties.requestRoutingRules', 'type': '[ApplicationGatewayRequestRoutingRule]'},
'rewrite_rule_sets': {'key': 'properties.rewriteRuleSets', 'type': '[ApplicationGatewayRewriteRuleSet]'},
'redirect_configurations': {'key': 'properties.redirectConfigurations', 'type': '[ApplicationGatewayRedirectConfiguration]'},
'web_application_firewall_configuration': {'key': 'properties.webApplicationFirewallConfiguration', 'type': 'ApplicationGatewayWebApplicationFirewallConfiguration'},
'firewall_policy': {'key': 'properties.firewallPolicy', 'type': 'SubResource'},
'enable_http2': {'key': 'properties.enableHttp2', 'type': 'bool'},
'enable_fips': {'key': 'properties.enableFips', 'type': 'bool'},
'autoscale_configuration': {'key': 'properties.autoscaleConfiguration', 'type': 'ApplicationGatewayAutoscaleConfiguration'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'custom_error_configurations': {'key': 'properties.customErrorConfigurations', 'type': '[ApplicationGatewayCustomError]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGateway, self).__init__(**kwargs)
self.etag = None
self.zones = kwargs.get('zones', None)
self.identity = kwargs.get('identity', None)
self.sku = kwargs.get('sku', None)
self.ssl_policy = kwargs.get('ssl_policy', None)
self.operational_state = None
self.gateway_ip_configurations = kwargs.get('gateway_ip_configurations', None)
self.authentication_certificates = kwargs.get('authentication_certificates', None)
self.trusted_root_certificates = kwargs.get('trusted_root_certificates', None)
self.ssl_certificates = kwargs.get('ssl_certificates', None)
self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None)
self.frontend_ports = kwargs.get('frontend_ports', None)
self.probes = kwargs.get('probes', None)
self.backend_address_pools = kwargs.get('backend_address_pools', None)
self.backend_http_settings_collection = kwargs.get('backend_http_settings_collection', None)
self.http_listeners = kwargs.get('http_listeners', None)
self.url_path_maps = kwargs.get('url_path_maps', None)
self.request_routing_rules = kwargs.get('request_routing_rules', None)
self.rewrite_rule_sets = kwargs.get('rewrite_rule_sets', None)
self.redirect_configurations = kwargs.get('redirect_configurations', None)
self.web_application_firewall_configuration = kwargs.get('web_application_firewall_configuration', None)
self.firewall_policy = kwargs.get('firewall_policy', None)
self.enable_http2 = kwargs.get('enable_http2', None)
self.enable_fips = kwargs.get('enable_fips', None)
self.autoscale_configuration = kwargs.get('autoscale_configuration', None)
self.resource_guid = None
self.provisioning_state = None
self.custom_error_configurations = kwargs.get('custom_error_configurations', None)
class SubResource(msrest.serialization.Model):
"""Reference to another subresource.
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubResource, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class ApplicationGatewayAuthenticationCertificate(SubResource):
"""Authentication certificates of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the authentication certificate that is unique within an Application
Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param data: Certificate public data.
:type data: str
:ivar provisioning_state: The provisioning state of the authentication certificate resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'data': {'key': 'properties.data', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAuthenticationCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.data = kwargs.get('data', None)
self.provisioning_state = None
class ApplicationGatewayAutoscaleConfiguration(msrest.serialization.Model):
"""Application Gateway autoscale configuration.
All required parameters must be populated in order to send to Azure.
:param min_capacity: Required. Lower bound on number of Application Gateway capacity.
:type min_capacity: int
:param max_capacity: Upper bound on number of Application Gateway capacity.
:type max_capacity: int
"""
_validation = {
'min_capacity': {'required': True, 'minimum': 0},
'max_capacity': {'minimum': 2},
}
_attribute_map = {
'min_capacity': {'key': 'minCapacity', 'type': 'int'},
'max_capacity': {'key': 'maxCapacity', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAutoscaleConfiguration, self).__init__(**kwargs)
self.min_capacity = kwargs['min_capacity']
self.max_capacity = kwargs.get('max_capacity', None)
class ApplicationGatewayAvailableSslOptions(Resource):
"""Response for ApplicationGatewayAvailableSslOptions API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param predefined_policies: List of available Ssl predefined policy.
:type predefined_policies: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param default_policy: Name of the Ssl predefined policy applied by default to application
gateway. Possible values include: "AppGwSslPolicy20150501", "AppGwSslPolicy20170401",
"AppGwSslPolicy20170401S".
:type default_policy: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslPolicyName
:param available_cipher_suites: List of available Ssl cipher suites.
:type available_cipher_suites: list[str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslCipherSuite]
:param available_protocols: List of available Ssl protocols.
:type available_protocols: list[str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslProtocol]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'predefined_policies': {'key': 'properties.predefinedPolicies', 'type': '[SubResource]'},
'default_policy': {'key': 'properties.defaultPolicy', 'type': 'str'},
'available_cipher_suites': {'key': 'properties.availableCipherSuites', 'type': '[str]'},
'available_protocols': {'key': 'properties.availableProtocols', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAvailableSslOptions, self).__init__(**kwargs)
self.predefined_policies = kwargs.get('predefined_policies', None)
self.default_policy = kwargs.get('default_policy', None)
self.available_cipher_suites = kwargs.get('available_cipher_suites', None)
self.available_protocols = kwargs.get('available_protocols', None)
class ApplicationGatewayAvailableSslPredefinedPolicies(msrest.serialization.Model):
"""Response for ApplicationGatewayAvailableSslOptions API service call.
:param value: List of available Ssl predefined policy.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslPredefinedPolicy]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationGatewaySslPredefinedPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAvailableSslPredefinedPolicies, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ApplicationGatewayAvailableWafRuleSetsResult(msrest.serialization.Model):
"""Response for ApplicationGatewayAvailableWafRuleSets API service call.
:param value: The list of application gateway rule sets.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFirewallRuleSet]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationGatewayFirewallRuleSet]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayAvailableWafRuleSetsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ApplicationGatewayBackendAddress(msrest.serialization.Model):
"""Backend address of an application gateway.
:param fqdn: Fully qualified domain name (FQDN).
:type fqdn: str
:param ip_address: IP address.
:type ip_address: str
"""
_attribute_map = {
'fqdn': {'key': 'fqdn', 'type': 'str'},
'ip_address': {'key': 'ipAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendAddress, self).__init__(**kwargs)
self.fqdn = kwargs.get('fqdn', None)
self.ip_address = kwargs.get('ip_address', None)
class ApplicationGatewayBackendAddressPool(SubResource):
"""Backend Address Pool of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the backend address pool that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:ivar backend_ip_configurations: Collection of references to IPs defined in network interfaces.
:vartype backend_ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfiguration]
:param backend_addresses: Backend addresses.
:type backend_addresses:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendAddress]
:ivar provisioning_state: The provisioning state of the backend address pool resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'backend_ip_configurations': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'backend_addresses': {'key': 'properties.backendAddresses', 'type': '[ApplicationGatewayBackendAddress]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendAddressPool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.backend_ip_configurations = None
self.backend_addresses = kwargs.get('backend_addresses', None)
self.provisioning_state = None
class ApplicationGatewayBackendHealth(msrest.serialization.Model):
"""Response for ApplicationGatewayBackendHealth API service call.
:param backend_address_pools: A list of ApplicationGatewayBackendHealthPool resources.
:type backend_address_pools:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendHealthPool]
"""
_attribute_map = {
'backend_address_pools': {'key': 'backendAddressPools', 'type': '[ApplicationGatewayBackendHealthPool]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealth, self).__init__(**kwargs)
self.backend_address_pools = kwargs.get('backend_address_pools', None)
class ApplicationGatewayBackendHealthHttpSettings(msrest.serialization.Model):
"""Application gateway BackendHealthHttp settings.
:param backend_http_settings: Reference of an ApplicationGatewayBackendHttpSettings resource.
:type backend_http_settings:
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendHttpSettings
:param servers: List of ApplicationGatewayBackendHealthServer resources.
:type servers:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendHealthServer]
"""
_attribute_map = {
'backend_http_settings': {'key': 'backendHttpSettings', 'type': 'ApplicationGatewayBackendHttpSettings'},
'servers': {'key': 'servers', 'type': '[ApplicationGatewayBackendHealthServer]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthHttpSettings, self).__init__(**kwargs)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
self.servers = kwargs.get('servers', None)
class ApplicationGatewayBackendHealthOnDemand(msrest.serialization.Model):
"""Result of on demand test probe.
:param backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool resource.
:type backend_address_pool:
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendAddressPool
:param backend_health_http_settings: Application gateway BackendHealthHttp settings.
:type backend_health_http_settings:
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendHealthHttpSettings
"""
_attribute_map = {
'backend_address_pool': {'key': 'backendAddressPool', 'type': 'ApplicationGatewayBackendAddressPool'},
'backend_health_http_settings': {'key': 'backendHealthHttpSettings', 'type': 'ApplicationGatewayBackendHealthHttpSettings'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthOnDemand, self).__init__(**kwargs)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_health_http_settings = kwargs.get('backend_health_http_settings', None)
class ApplicationGatewayBackendHealthPool(msrest.serialization.Model):
"""Application gateway BackendHealth pool.
:param backend_address_pool: Reference of an ApplicationGatewayBackendAddressPool resource.
:type backend_address_pool:
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendAddressPool
:param backend_http_settings_collection: List of ApplicationGatewayBackendHealthHttpSettings
resources.
:type backend_http_settings_collection:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendHealthHttpSettings]
"""
_attribute_map = {
'backend_address_pool': {'key': 'backendAddressPool', 'type': 'ApplicationGatewayBackendAddressPool'},
'backend_http_settings_collection': {'key': 'backendHttpSettingsCollection', 'type': '[ApplicationGatewayBackendHealthHttpSettings]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthPool, self).__init__(**kwargs)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings_collection = kwargs.get('backend_http_settings_collection', None)
class ApplicationGatewayBackendHealthServer(msrest.serialization.Model):
"""Application gateway backendhealth http settings.
:param address: IP address or FQDN of backend server.
:type address: str
:param ip_configuration: Reference of IP configuration of backend server.
:type ip_configuration: ~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfiguration
:param health: Health of backend server. Possible values include: "Unknown", "Up", "Down",
"Partial", "Draining".
:type health: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendHealthServerHealth
:param health_probe_log: Health Probe Log.
:type health_probe_log: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
'ip_configuration': {'key': 'ipConfiguration', 'type': 'NetworkInterfaceIPConfiguration'},
'health': {'key': 'health', 'type': 'str'},
'health_probe_log': {'key': 'healthProbeLog', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHealthServer, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
self.ip_configuration = kwargs.get('ip_configuration', None)
self.health = kwargs.get('health', None)
self.health_probe_log = kwargs.get('health_probe_log', None)
class ApplicationGatewayBackendHttpSettings(SubResource):
"""Backend address pool settings of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the backend http settings that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param port: The destination port on the backend.
:type port: int
:param protocol: The protocol used to communicate with the backend. Possible values include:
"Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayProtocol
:param cookie_based_affinity: Cookie based affinity. Possible values include: "Enabled",
"Disabled".
:type cookie_based_affinity: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayCookieBasedAffinity
:param request_timeout: Request timeout in seconds. Application Gateway will fail the request
if response is not received within RequestTimeout. Acceptable values are from 1 second to 86400
seconds.
:type request_timeout: int
:param probe: Probe resource of an application gateway.
:type probe: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param authentication_certificates: Array of references to application gateway authentication
certificates.
:type authentication_certificates: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param trusted_root_certificates: Array of references to application gateway trusted root
certificates.
:type trusted_root_certificates: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param connection_draining: Connection draining of the backend http settings resource.
:type connection_draining:
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayConnectionDraining
:param host_name: Host header to be sent to the backend servers.
:type host_name: str
:param pick_host_name_from_backend_address: Whether the host header should be picked from the
host name of the backend server. Default value is false.
:type pick_host_name_from_backend_address: bool
:param affinity_cookie_name: Cookie name to use for the affinity cookie.
:type affinity_cookie_name: str
:param probe_enabled: Whether the probe is enabled. Default value is false.
:type probe_enabled: bool
:param path: Path which should be used as a prefix for all HTTP requests. Null means no path
will be prefixed. Default value is null.
:type path: str
:ivar provisioning_state: The provisioning state of the backend HTTP settings resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'cookie_based_affinity': {'key': 'properties.cookieBasedAffinity', 'type': 'str'},
'request_timeout': {'key': 'properties.requestTimeout', 'type': 'int'},
'probe': {'key': 'properties.probe', 'type': 'SubResource'},
'authentication_certificates': {'key': 'properties.authenticationCertificates', 'type': '[SubResource]'},
'trusted_root_certificates': {'key': 'properties.trustedRootCertificates', 'type': '[SubResource]'},
'connection_draining': {'key': 'properties.connectionDraining', 'type': 'ApplicationGatewayConnectionDraining'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'pick_host_name_from_backend_address': {'key': 'properties.pickHostNameFromBackendAddress', 'type': 'bool'},
'affinity_cookie_name': {'key': 'properties.affinityCookieName', 'type': 'str'},
'probe_enabled': {'key': 'properties.probeEnabled', 'type': 'bool'},
'path': {'key': 'properties.path', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayBackendHttpSettings, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.port = kwargs.get('port', None)
self.protocol = kwargs.get('protocol', None)
self.cookie_based_affinity = kwargs.get('cookie_based_affinity', None)
self.request_timeout = kwargs.get('request_timeout', None)
self.probe = kwargs.get('probe', None)
self.authentication_certificates = kwargs.get('authentication_certificates', None)
self.trusted_root_certificates = kwargs.get('trusted_root_certificates', None)
self.connection_draining = kwargs.get('connection_draining', None)
self.host_name = kwargs.get('host_name', None)
self.pick_host_name_from_backend_address = kwargs.get('pick_host_name_from_backend_address', None)
self.affinity_cookie_name = kwargs.get('affinity_cookie_name', None)
self.probe_enabled = kwargs.get('probe_enabled', None)
self.path = kwargs.get('path', None)
self.provisioning_state = None
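# Illustrative sketch (editor's addition): typical HTTPS backend settings built from this
# module's kwargs-based models. The probe resource ID below is a hypothetical placeholder.
def _example_backend_http_settings():
    return ApplicationGatewayBackendHttpSettings(
        name='httpsSettings',                      # hypothetical settings name
        port=443,
        protocol='Https',
        cookie_based_affinity='Disabled',
        request_timeout=30,                        # seconds, acceptable range 1-86400
        pick_host_name_from_backend_address=True,  # reuse the backend server's host name
        probe=SubResource(id='<probe-resource-id>'),  # placeholder, not a real ID
    )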
class ApplicationGatewayConnectionDraining(msrest.serialization.Model):
"""Connection draining allows open connections to a backend server to be active for a specified time after the backend server got removed from the configuration.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Whether connection draining is enabled or not.
:type enabled: bool
:param drain_timeout_in_sec: Required. The number of seconds connection draining is active.
Acceptable values are from 1 second to 3600 seconds.
:type drain_timeout_in_sec: int
"""
_validation = {
'enabled': {'required': True},
'drain_timeout_in_sec': {'required': True, 'maximum': 3600, 'minimum': 1},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'drain_timeout_in_sec': {'key': 'drainTimeoutInSec', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayConnectionDraining, self).__init__(**kwargs)
self.enabled = kwargs['enabled']
self.drain_timeout_in_sec = kwargs['drain_timeout_in_sec']
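# Illustrative sketch (editor's addition): because __init__ reads the required keys with
# kwargs['enabled'] and kwargs['drain_timeout_in_sec'], omitting either one raises KeyError.
def _example_connection_draining():
    return ApplicationGatewayConnectionDraining(
        enabled=True,
        drain_timeout_in_sec=60,  # validated range is 1-3600 seconds
    )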
class ApplicationGatewayCustomError(msrest.serialization.Model):
"""Customer error of an application gateway.
:param status_code: Status code of the application gateway custom error. Possible values
include: "HttpStatus403", "HttpStatus502".
:type status_code: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayCustomErrorStatusCode
:param custom_error_page_url: Error page URL of the application gateway custom error.
:type custom_error_page_url: str
"""
_attribute_map = {
'status_code': {'key': 'statusCode', 'type': 'str'},
'custom_error_page_url': {'key': 'customErrorPageUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayCustomError, self).__init__(**kwargs)
self.status_code = kwargs.get('status_code', None)
self.custom_error_page_url = kwargs.get('custom_error_page_url', None)
class ApplicationGatewayFirewallDisabledRuleGroup(msrest.serialization.Model):
"""Allows to disable rules within a rule group or an entire rule group.
All required parameters must be populated in order to send to Azure.
:param rule_group_name: Required. The name of the rule group that will be disabled.
:type rule_group_name: str
:param rules: The list of rules that will be disabled. If null, all rules of the rule group
will be disabled.
:type rules: list[int]
"""
_validation = {
'rule_group_name': {'required': True},
}
_attribute_map = {
'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallDisabledRuleGroup, self).__init__(**kwargs)
self.rule_group_name = kwargs['rule_group_name']
self.rules = kwargs.get('rules', None)
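# Illustrative sketch (editor's addition): disabling two individual rules in one WAF rule
# group; the group name and rule IDs are example values only. Omitting ``rules`` disables
# the whole group.
def _example_disabled_rule_group():
    return ApplicationGatewayFirewallDisabledRuleGroup(
        rule_group_name='REQUEST-942-APPLICATION-ATTACK-SQLI',  # example group name
        rules=[942130, 942200],                                 # example rule IDs
    )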
class ApplicationGatewayFirewallExclusion(msrest.serialization.Model):
"""Allow to exclude some variable satisfy the condition for the WAF check.
All required parameters must be populated in order to send to Azure.
:param match_variable: Required. The variable to be excluded.
:type match_variable: str
:param selector_match_operator: Required. When matchVariable is a collection, the operator
applied to the selector to specify which elements in the collection this exclusion applies to.
:type selector_match_operator: str
:param selector: Required. When matchVariable is a collection, the selector used to specify
which elements in the collection this exclusion applies to.
:type selector: str
"""
_validation = {
'match_variable': {'required': True},
'selector_match_operator': {'required': True},
'selector': {'required': True},
}
_attribute_map = {
'match_variable': {'key': 'matchVariable', 'type': 'str'},
'selector_match_operator': {'key': 'selectorMatchOperator', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallExclusion, self).__init__(**kwargs)
self.match_variable = kwargs['match_variable']
self.selector_match_operator = kwargs['selector_match_operator']
self.selector = kwargs['selector']
class ApplicationGatewayFirewallRule(msrest.serialization.Model):
"""A web application firewall rule.
All required parameters must be populated in order to send to Azure.
:param rule_id: Required. The identifier of the web application firewall rule.
:type rule_id: int
:param description: The description of the web application firewall rule.
:type description: str
"""
_validation = {
'rule_id': {'required': True},
}
_attribute_map = {
'rule_id': {'key': 'ruleId', 'type': 'int'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallRule, self).__init__(**kwargs)
self.rule_id = kwargs['rule_id']
self.description = kwargs.get('description', None)
class ApplicationGatewayFirewallRuleGroup(msrest.serialization.Model):
"""A web application firewall rule group.
All required parameters must be populated in order to send to Azure.
:param rule_group_name: Required. The name of the web application firewall rule group.
:type rule_group_name: str
:param description: The description of the web application firewall rule group.
:type description: str
:param rules: Required. The rules of the web application firewall rule group.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFirewallRule]
"""
_validation = {
'rule_group_name': {'required': True},
'rules': {'required': True},
}
_attribute_map = {
'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[ApplicationGatewayFirewallRule]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallRuleGroup, self).__init__(**kwargs)
self.rule_group_name = kwargs['rule_group_name']
self.description = kwargs.get('description', None)
self.rules = kwargs['rules']
class ApplicationGatewayFirewallRuleSet(Resource):
"""A web application firewall rule set.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar provisioning_state: The provisioning state of the web application firewall rule set.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param rule_set_type: The type of the web application firewall rule set.
:type rule_set_type: str
:param rule_set_version: The version of the web application firewall rule set type.
:type rule_set_version: str
:param rule_groups: The rule groups of the web application firewall rule set.
:type rule_groups:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFirewallRuleGroup]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'rule_set_type': {'key': 'properties.ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'properties.ruleSetVersion', 'type': 'str'},
'rule_groups': {'key': 'properties.ruleGroups', 'type': '[ApplicationGatewayFirewallRuleGroup]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFirewallRuleSet, self).__init__(**kwargs)
self.provisioning_state = None
self.rule_set_type = kwargs.get('rule_set_type', None)
self.rule_set_version = kwargs.get('rule_set_version', None)
self.rule_groups = kwargs.get('rule_groups', None)
class ApplicationGatewayFrontendIPConfiguration(SubResource):
"""Frontend IP configuration of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the frontend IP configuration that is unique within an Application
Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param private_ip_address: PrivateIPAddress of the network interface IP Configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
:param subnet: Reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param public_ip_address: Reference of the PublicIP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the frontend IP configuration resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFrontendIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
class ApplicationGatewayFrontendPort(SubResource):
"""Frontend port of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the frontend port that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param port: Frontend port.
:type port: int
:ivar provisioning_state: The provisioning state of the frontend port resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayFrontendPort, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.port = kwargs.get('port', None)
self.provisioning_state = None
class ApplicationGatewayHeaderConfiguration(msrest.serialization.Model):
"""Header configuration of the Actions set in Application Gateway.
:param header_name: Header name of the header configuration.
:type header_name: str
:param header_value: Header value of the header configuration.
:type header_value: str
"""
_attribute_map = {
'header_name': {'key': 'headerName', 'type': 'str'},
'header_value': {'key': 'headerValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayHeaderConfiguration, self).__init__(**kwargs)
self.header_name = kwargs.get('header_name', None)
self.header_value = kwargs.get('header_value', None)
class ApplicationGatewayHttpListener(SubResource):
"""Http listener of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the HTTP listener that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param frontend_ip_configuration: Frontend IP configuration resource of an application gateway.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param frontend_port: Frontend port resource of an application gateway.
:type frontend_port: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param protocol: Protocol of the HTTP listener. Possible values include: "Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayProtocol
:param host_name: Host name of HTTP listener.
:type host_name: str
:param ssl_certificate: SSL certificate resource of an application gateway.
:type ssl_certificate: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param require_server_name_indication: Applicable only if protocol is https. Enables SNI for
multi-hosting.
:type require_server_name_indication: bool
:ivar provisioning_state: The provisioning state of the HTTP listener resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param custom_error_configurations: Custom error configurations of the HTTP listener.
:type custom_error_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayCustomError]
:param firewall_policy: Reference to the FirewallPolicy resource.
:type firewall_policy: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param hostnames: List of host names for the HTTP listener; special wildcard characters are
allowed as well.
:type hostnames: list[str]
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'frontend_port': {'key': 'properties.frontendPort', 'type': 'SubResource'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'ssl_certificate': {'key': 'properties.sslCertificate', 'type': 'SubResource'},
'require_server_name_indication': {'key': 'properties.requireServerNameIndication', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'custom_error_configurations': {'key': 'properties.customErrorConfigurations', 'type': '[ApplicationGatewayCustomError]'},
'firewall_policy': {'key': 'properties.firewallPolicy', 'type': 'SubResource'},
'hostnames': {'key': 'properties.hostnames', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayHttpListener, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.frontend_port = kwargs.get('frontend_port', None)
self.protocol = kwargs.get('protocol', None)
self.host_name = kwargs.get('host_name', None)
self.ssl_certificate = kwargs.get('ssl_certificate', None)
self.require_server_name_indication = kwargs.get('require_server_name_indication', None)
self.provisioning_state = None
self.custom_error_configurations = kwargs.get('custom_error_configurations', None)
self.firewall_policy = kwargs.get('firewall_policy', None)
self.hostnames = kwargs.get('hostnames', None)
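# Illustrative sketch (editor's addition): an HTTPS listener wired up through SubResource
# references. All resource IDs and names below are hypothetical placeholders.
def _example_https_listener():
    return ApplicationGatewayHttpListener(
        name='httpsListener',  # hypothetical listener name
        frontend_ip_configuration=SubResource(id='<frontend-ip-configuration-id>'),
        frontend_port=SubResource(id='<frontend-port-id>'),
        protocol='Https',
        host_name='www.contoso.com',
        ssl_certificate=SubResource(id='<ssl-certificate-id>'),
        require_server_name_indication=True,
    )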
class ApplicationGatewayIPConfiguration(SubResource):
"""IP configuration of an application gateway. Currently 1 public and 1 private IP configuration is allowed.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the IP configuration that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param subnet: Reference of the subnet resource. A subnet from where application gateway gets
its private address.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the application gateway IP configuration
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.subnet = kwargs.get('subnet', None)
self.provisioning_state = None
class ApplicationGatewayListResult(msrest.serialization.Model):
"""Response for ListApplicationGateways API service call.
:param value: List of application gateways in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGateway]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ApplicationGatewayOnDemandProbe(msrest.serialization.Model):
"""Details of on demand test probe request.
:param protocol: The protocol used for the probe. Possible values include: "Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayProtocol
:param host: Host name to send the probe to.
:type host: str
:param path: Relative path of probe. Valid path starts from '/'. Probe is sent to
:code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
:type path: str
:param timeout: The probe timeout in seconds. Probe marked as failed if valid response is not
received with this timeout period. Acceptable values are from 1 second to 86400 seconds.
:type timeout: int
:param pick_host_name_from_backend_http_settings: Whether the host header should be picked from
the backend http settings. Default value is false.
:type pick_host_name_from_backend_http_settings: bool
:param match: Criterion for classifying a healthy probe response.
:type match: ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayProbeHealthResponseMatch
:param backend_address_pool: Reference of backend pool of application gateway to which probe
request will be sent.
:type backend_address_pool: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param backend_http_settings: Reference of backend http setting of application gateway to be
used for test probe.
:type backend_http_settings: ~azure.mgmt.network.v2019_09_01.models.SubResource
"""
_attribute_map = {
'protocol': {'key': 'protocol', 'type': 'str'},
'host': {'key': 'host', 'type': 'str'},
'path': {'key': 'path', 'type': 'str'},
'timeout': {'key': 'timeout', 'type': 'int'},
'pick_host_name_from_backend_http_settings': {'key': 'pickHostNameFromBackendHttpSettings', 'type': 'bool'},
'match': {'key': 'match', 'type': 'ApplicationGatewayProbeHealthResponseMatch'},
'backend_address_pool': {'key': 'backendAddressPool', 'type': 'SubResource'},
'backend_http_settings': {'key': 'backendHttpSettings', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayOnDemandProbe, self).__init__(**kwargs)
self.protocol = kwargs.get('protocol', None)
self.host = kwargs.get('host', None)
self.path = kwargs.get('path', None)
self.timeout = kwargs.get('timeout', None)
self.pick_host_name_from_backend_http_settings = kwargs.get('pick_host_name_from_backend_http_settings', None)
self.match = kwargs.get('match', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
class ApplicationGatewayPathRule(SubResource):
"""Path rule of URL path map of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the path rule that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param paths: Path rules of URL path map.
:type paths: list[str]
:param backend_address_pool: Backend address pool resource of URL path map path rule.
:type backend_address_pool: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param backend_http_settings: Backend http settings resource of URL path map path rule.
:type backend_http_settings: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param redirect_configuration: Redirect configuration resource of URL path map path rule.
:type redirect_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param rewrite_rule_set: Rewrite rule set resource of URL path map path rule.
:type rewrite_rule_set: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the path rule resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param firewall_policy: Reference to the FirewallPolicy resource.
:type firewall_policy: ~azure.mgmt.network.v2019_09_01.models.SubResource
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'paths': {'key': 'properties.paths', 'type': '[str]'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'backend_http_settings': {'key': 'properties.backendHttpSettings', 'type': 'SubResource'},
'redirect_configuration': {'key': 'properties.redirectConfiguration', 'type': 'SubResource'},
'rewrite_rule_set': {'key': 'properties.rewriteRuleSet', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'firewall_policy': {'key': 'properties.firewallPolicy', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayPathRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.paths = kwargs.get('paths', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
self.redirect_configuration = kwargs.get('redirect_configuration', None)
self.rewrite_rule_set = kwargs.get('rewrite_rule_set', None)
self.provisioning_state = None
self.firewall_policy = kwargs.get('firewall_policy', None)
class ApplicationGatewayProbe(SubResource):
"""Probe of the application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the probe that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param protocol: The protocol used for the probe. Possible values include: "Http", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayProtocol
:param host: Host name to send the probe to.
:type host: str
:param path: Relative path of probe. Valid path starts from '/'. Probe is sent to
:code:`<Protocol>`://:code:`<host>`::code:`<port>`:code:`<path>`.
:type path: str
:param interval: The probing interval in seconds. This is the time interval between two
consecutive probes. Acceptable values are from 1 second to 86400 seconds.
:type interval: int
:param timeout: The probe timeout in seconds. Probe marked as failed if valid response is not
received with this timeout period. Acceptable values are from 1 second to 86400 seconds.
:type timeout: int
:param unhealthy_threshold: The probe retry count. The backend server is marked down after the
consecutive probe failure count reaches UnhealthyThreshold. Acceptable values are from 1
to 20.
:type unhealthy_threshold: int
:param pick_host_name_from_backend_http_settings: Whether the host header should be picked from
the backend http settings. Default value is false.
:type pick_host_name_from_backend_http_settings: bool
:param min_servers: Minimum number of servers that are always marked healthy. Default value is
0.
:type min_servers: int
:param match: Criterion for classifying a healthy probe response.
:type match: ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayProbeHealthResponseMatch
:ivar provisioning_state: The provisioning state of the probe resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param port: Custom port which will be used for probing the backend servers. The valid value
ranges from 1 to 65535. If not set, the port from the HTTP settings will be used. This property
is valid for Standard_v2 and WAF_v2 only.
:type port: int
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
'port': {'maximum': 65535, 'minimum': 1},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'host': {'key': 'properties.host', 'type': 'str'},
'path': {'key': 'properties.path', 'type': 'str'},
'interval': {'key': 'properties.interval', 'type': 'int'},
'timeout': {'key': 'properties.timeout', 'type': 'int'},
'unhealthy_threshold': {'key': 'properties.unhealthyThreshold', 'type': 'int'},
'pick_host_name_from_backend_http_settings': {'key': 'properties.pickHostNameFromBackendHttpSettings', 'type': 'bool'},
'min_servers': {'key': 'properties.minServers', 'type': 'int'},
'match': {'key': 'properties.match', 'type': 'ApplicationGatewayProbeHealthResponseMatch'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayProbe, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.protocol = kwargs.get('protocol', None)
self.host = kwargs.get('host', None)
self.path = kwargs.get('path', None)
self.interval = kwargs.get('interval', None)
self.timeout = kwargs.get('timeout', None)
self.unhealthy_threshold = kwargs.get('unhealthy_threshold', None)
self.pick_host_name_from_backend_http_settings = kwargs.get('pick_host_name_from_backend_http_settings', None)
self.min_servers = kwargs.get('min_servers', None)
self.match = kwargs.get('match', None)
self.provisioning_state = None
self.port = kwargs.get('port', None)
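# Illustrative sketch (editor's addition): a health probe that reuses the backend HTTP
# settings host name and states the default healthy status-code range explicitly. The probe
# name and path are hypothetical example values.
def _example_probe():
    return ApplicationGatewayProbe(
        name='healthProbe',  # hypothetical probe name
        protocol='Https',
        path='/health',      # hypothetical health endpoint
        interval=30,         # seconds between probes (1-86400)
        timeout=30,          # seconds before a probe is marked failed (1-86400)
        unhealthy_threshold=3,
        pick_host_name_from_backend_http_settings=True,
        match=ApplicationGatewayProbeHealthResponseMatch(status_codes=['200-399']),
    )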
class ApplicationGatewayProbeHealthResponseMatch(msrest.serialization.Model):
"""Application gateway probe health response match.
:param body: Body that must be contained in the health response. Default value is empty.
:type body: str
:param status_codes: Allowed ranges of healthy status codes. Default range of healthy status
codes is 200-399.
:type status_codes: list[str]
"""
_attribute_map = {
'body': {'key': 'body', 'type': 'str'},
'status_codes': {'key': 'statusCodes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayProbeHealthResponseMatch, self).__init__(**kwargs)
self.body = kwargs.get('body', None)
self.status_codes = kwargs.get('status_codes', None)
class ApplicationGatewayRedirectConfiguration(SubResource):
"""Redirect configuration of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the redirect configuration that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param redirect_type: HTTP redirection type. Possible values include: "Permanent", "Found",
"SeeOther", "Temporary".
:type redirect_type: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRedirectType
:param target_listener: Reference to a listener to redirect the request to.
:type target_listener: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param target_url: Url to redirect the request to.
:type target_url: str
:param include_path: Include path in the redirected url.
:type include_path: bool
:param include_query_string: Include query string in the redirected url.
:type include_query_string: bool
:param request_routing_rules: Request routing rules specifying redirect configuration.
:type request_routing_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param url_path_maps: Url path maps specifying default redirect configuration.
:type url_path_maps: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param path_rules: Path rules specifying redirect configuration.
:type path_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'redirect_type': {'key': 'properties.redirectType', 'type': 'str'},
'target_listener': {'key': 'properties.targetListener', 'type': 'SubResource'},
'target_url': {'key': 'properties.targetUrl', 'type': 'str'},
'include_path': {'key': 'properties.includePath', 'type': 'bool'},
'include_query_string': {'key': 'properties.includeQueryString', 'type': 'bool'},
'request_routing_rules': {'key': 'properties.requestRoutingRules', 'type': '[SubResource]'},
'url_path_maps': {'key': 'properties.urlPathMaps', 'type': '[SubResource]'},
'path_rules': {'key': 'properties.pathRules', 'type': '[SubResource]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRedirectConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.redirect_type = kwargs.get('redirect_type', None)
self.target_listener = kwargs.get('target_listener', None)
self.target_url = kwargs.get('target_url', None)
self.include_path = kwargs.get('include_path', None)
self.include_query_string = kwargs.get('include_query_string', None)
self.request_routing_rules = kwargs.get('request_routing_rules', None)
self.url_path_maps = kwargs.get('url_path_maps', None)
self.path_rules = kwargs.get('path_rules', None)
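# Illustrative sketch (editor's addition): a permanent redirect to another listener that
# preserves the original path and query string. The target listener ID is a placeholder.
def _example_redirect_configuration():
    return ApplicationGatewayRedirectConfiguration(
        name='httpToHttpsRedirect',  # hypothetical configuration name
        redirect_type='Permanent',
        target_listener=SubResource(id='<https-listener-id>'),
        include_path=True,
        include_query_string=True,
    )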
class ApplicationGatewayRequestRoutingRule(SubResource):
"""Request routing rule of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the request routing rule that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param rule_type: Rule type. Possible values include: "Basic", "PathBasedRouting".
:type rule_type: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRequestRoutingRuleType
:param priority: Priority of the request routing rule.
:type priority: int
:param backend_address_pool: Backend address pool resource of the application gateway.
:type backend_address_pool: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param backend_http_settings: Backend http settings resource of the application gateway.
:type backend_http_settings: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param http_listener: Http listener resource of the application gateway.
:type http_listener: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param url_path_map: URL path map resource of the application gateway.
:type url_path_map: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param rewrite_rule_set: Rewrite Rule Set resource in Basic rule of the application gateway.
:type rewrite_rule_set: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param redirect_configuration: Redirect configuration resource of the application gateway.
:type redirect_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the request routing rule resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'priority': {'maximum': 20000, 'minimum': 1},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'rule_type': {'key': 'properties.ruleType', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'backend_http_settings': {'key': 'properties.backendHttpSettings', 'type': 'SubResource'},
'http_listener': {'key': 'properties.httpListener', 'type': 'SubResource'},
'url_path_map': {'key': 'properties.urlPathMap', 'type': 'SubResource'},
'rewrite_rule_set': {'key': 'properties.rewriteRuleSet', 'type': 'SubResource'},
'redirect_configuration': {'key': 'properties.redirectConfiguration', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRequestRoutingRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.rule_type = kwargs.get('rule_type', None)
self.priority = kwargs.get('priority', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.backend_http_settings = kwargs.get('backend_http_settings', None)
self.http_listener = kwargs.get('http_listener', None)
self.url_path_map = kwargs.get('url_path_map', None)
self.rewrite_rule_set = kwargs.get('rewrite_rule_set', None)
self.redirect_configuration = kwargs.get('redirect_configuration', None)
self.provisioning_state = None
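# Illustrative sketch (editor's addition): a Basic rule tying a listener to a backend pool
# and its HTTP settings. The referenced resource IDs are hypothetical placeholders.
def _example_request_routing_rule():
    return ApplicationGatewayRequestRoutingRule(
        name='basicRule',  # hypothetical rule name
        rule_type='Basic',
        priority=100,      # validated range is 1-20000
        http_listener=SubResource(id='<http-listener-id>'),
        backend_address_pool=SubResource(id='<backend-address-pool-id>'),
        backend_http_settings=SubResource(id='<backend-http-settings-id>'),
    )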
class ApplicationGatewayRewriteRule(msrest.serialization.Model):
"""Rewrite rule of an application gateway.
:param name: Name of the rewrite rule that is unique within an Application Gateway.
:type name: str
:param rule_sequence: Rule Sequence of the rewrite rule that determines the order of execution
of a particular rule in a RewriteRuleSet.
:type rule_sequence: int
:param conditions: Conditions based on which the action set execution will be evaluated.
:type conditions:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRewriteRuleCondition]
:param action_set: Set of actions to be done as part of the rewrite Rule.
:type action_set: ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRewriteRuleActionSet
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'rule_sequence': {'key': 'ruleSequence', 'type': 'int'},
'conditions': {'key': 'conditions', 'type': '[ApplicationGatewayRewriteRuleCondition]'},
'action_set': {'key': 'actionSet', 'type': 'ApplicationGatewayRewriteRuleActionSet'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.rule_sequence = kwargs.get('rule_sequence', None)
self.conditions = kwargs.get('conditions', None)
self.action_set = kwargs.get('action_set', None)
class ApplicationGatewayRewriteRuleActionSet(msrest.serialization.Model):
"""Set of actions in the Rewrite Rule in Application Gateway.
:param request_header_configurations: Request Header Actions in the Action Set.
:type request_header_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayHeaderConfiguration]
:param response_header_configurations: Response Header Actions in the Action Set.
:type response_header_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayHeaderConfiguration]
"""
_attribute_map = {
'request_header_configurations': {'key': 'requestHeaderConfigurations', 'type': '[ApplicationGatewayHeaderConfiguration]'},
'response_header_configurations': {'key': 'responseHeaderConfigurations', 'type': '[ApplicationGatewayHeaderConfiguration]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRuleActionSet, self).__init__(**kwargs)
self.request_header_configurations = kwargs.get('request_header_configurations', None)
self.response_header_configurations = kwargs.get('response_header_configurations', None)
class ApplicationGatewayRewriteRuleCondition(msrest.serialization.Model):
"""Set of conditions in the Rewrite Rule in Application Gateway.
:param variable: The condition parameter of the RewriteRuleCondition.
:type variable: str
:param pattern: The pattern, either fixed string or regular expression, that evaluates the
truthfulness of the condition.
:type pattern: str
:param ignore_case: Setting this parameter to true forces the pattern to do a case-insensitive
comparison.
:type ignore_case: bool
:param negate: Setting this value to true negates the condition given by the user.
:type negate: bool
"""
_attribute_map = {
'variable': {'key': 'variable', 'type': 'str'},
'pattern': {'key': 'pattern', 'type': 'str'},
'ignore_case': {'key': 'ignoreCase', 'type': 'bool'},
'negate': {'key': 'negate', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRuleCondition, self).__init__(**kwargs)
self.variable = kwargs.get('variable', None)
self.pattern = kwargs.get('pattern', None)
self.ignore_case = kwargs.get('ignore_case', None)
self.negate = kwargs.get('negate', None)
class ApplicationGatewayRewriteRuleSet(SubResource):
"""Rewrite rule set of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the rewrite rule set that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param rewrite_rules: Rewrite rules in the rewrite rule set.
:type rewrite_rules: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayRewriteRule]
:ivar provisioning_state: The provisioning state of the rewrite rule set resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'rewrite_rules': {'key': 'properties.rewriteRules', 'type': '[ApplicationGatewayRewriteRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayRewriteRuleSet, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.rewrite_rules = kwargs.get('rewrite_rules', None)
self.provisioning_state = None
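# Illustrative sketch (editor's addition): a rewrite rule set with one conditional rule that
# adds a request header. The condition variable, pattern, and header values are hypothetical
# examples, not a statement of the supported server-variable names.
def _example_rewrite_rule_set():
    return ApplicationGatewayRewriteRuleSet(
        name='headerRewrites',  # hypothetical rule set name
        rewrite_rules=[
            ApplicationGatewayRewriteRule(
                name='add-example-header',
                rule_sequence=100,
                conditions=[
                    ApplicationGatewayRewriteRuleCondition(
                        variable='var_request_uri',  # example variable name
                        pattern='^/api/.*',
                        ignore_case=True,
                        negate=False,
                    ),
                ],
                action_set=ApplicationGatewayRewriteRuleActionSet(
                    request_header_configurations=[
                        ApplicationGatewayHeaderConfiguration(
                            header_name='X-Example-Header',   # example header name
                            header_value='rewritten-by-appgw',
                        ),
                    ],
                ),
            ),
        ],
    )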
class ApplicationGatewaySku(msrest.serialization.Model):
"""SKU of an application gateway.
:param name: Name of an application gateway SKU. Possible values include: "Standard_Small",
"Standard_Medium", "Standard_Large", "WAF_Medium", "WAF_Large", "Standard_v2", "WAF_v2".
:type name: str or ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySkuName
:param tier: Tier of an application gateway. Possible values include: "Standard", "WAF",
"Standard_v2", "WAF_v2".
:type tier: str or ~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayTier
:param capacity: Capacity (instance count) of an application gateway.
:type capacity: int
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
'capacity': {'key': 'capacity', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
self.capacity = kwargs.get('capacity', None)
class ApplicationGatewaySslCertificate(SubResource):
"""SSL certificates of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the SSL certificate that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param data: Base-64 encoded pfx certificate. Only applicable in PUT Request.
:type data: str
:param password: Password for the pfx file specified in data. Only applicable in PUT request.
:type password: str
:ivar public_cert_data: Base-64 encoded Public cert data corresponding to pfx specified in
data. Only applicable in GET request.
:vartype public_cert_data: str
:param key_vault_secret_id: Secret Id of (base-64 encoded unencrypted pfx) 'Secret' or
'Certificate' object stored in KeyVault.
:type key_vault_secret_id: str
:ivar provisioning_state: The provisioning state of the SSL certificate resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'public_cert_data': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'data': {'key': 'properties.data', 'type': 'str'},
'password': {'key': 'properties.password', 'type': 'str'},
'public_cert_data': {'key': 'properties.publicCertData', 'type': 'str'},
'key_vault_secret_id': {'key': 'properties.keyVaultSecretId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySslCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.data = kwargs.get('data', None)
self.password = kwargs.get('password', None)
self.public_cert_data = None
self.key_vault_secret_id = kwargs.get('key_vault_secret_id', None)
self.provisioning_state = None
class ApplicationGatewaySslPolicy(msrest.serialization.Model):
"""Application Gateway Ssl policy.
:param disabled_ssl_protocols: Ssl protocols to be disabled on application gateway.
:type disabled_ssl_protocols: list[str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslProtocol]
:param policy_type: Type of Ssl Policy. Possible values include: "Predefined", "Custom".
:type policy_type: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslPolicyType
:param policy_name: Name of Ssl predefined policy. Possible values include:
"AppGwSslPolicy20150501", "AppGwSslPolicy20170401", "AppGwSslPolicy20170401S".
:type policy_name: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslPolicyName
:param cipher_suites: Ssl cipher suites to be enabled in the specified order to application
gateway.
:type cipher_suites: list[str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslCipherSuite]
:param min_protocol_version: Minimum version of Ssl protocol to be supported on application
gateway. Possible values include: "TLSv1_0", "TLSv1_1", "TLSv1_2".
:type min_protocol_version: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslProtocol
"""
_attribute_map = {
'disabled_ssl_protocols': {'key': 'disabledSslProtocols', 'type': '[str]'},
'policy_type': {'key': 'policyType', 'type': 'str'},
'policy_name': {'key': 'policyName', 'type': 'str'},
'cipher_suites': {'key': 'cipherSuites', 'type': '[str]'},
'min_protocol_version': {'key': 'minProtocolVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySslPolicy, self).__init__(**kwargs)
self.disabled_ssl_protocols = kwargs.get('disabled_ssl_protocols', None)
self.policy_type = kwargs.get('policy_type', None)
self.policy_name = kwargs.get('policy_name', None)
self.cipher_suites = kwargs.get('cipher_suites', None)
self.min_protocol_version = kwargs.get('min_protocol_version', None)
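# Illustrative sketch (editor's addition): selecting one of the predefined SSL policies named
# in the docstring above; a Custom policy would instead set ``cipher_suites`` and
# ``min_protocol_version``.
def _example_ssl_policy():
    return ApplicationGatewaySslPolicy(
        policy_type='Predefined',
        policy_name='AppGwSslPolicy20170401S',
    )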
class ApplicationGatewaySslPredefinedPolicy(SubResource):
"""An Ssl predefined policy.
:param id: Resource ID.
:type id: str
:param name: Name of the Ssl predefined policy.
:type name: str
:param cipher_suites: Ssl cipher suites to be enabled in the specified order for application
gateway.
:type cipher_suites: list[str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslCipherSuite]
:param min_protocol_version: Minimum version of Ssl protocol to be supported on application
gateway. Possible values include: "TLSv1_0", "TLSv1_1", "TLSv1_2".
:type min_protocol_version: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewaySslProtocol
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'cipher_suites': {'key': 'properties.cipherSuites', 'type': '[str]'},
'min_protocol_version': {'key': 'properties.minProtocolVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewaySslPredefinedPolicy, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.cipher_suites = kwargs.get('cipher_suites', None)
self.min_protocol_version = kwargs.get('min_protocol_version', None)
class ApplicationGatewayTrustedRootCertificate(SubResource):
"""Trusted Root certificates of an application gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the trusted root certificate that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param data: Certificate public data.
:type data: str
:param key_vault_secret_id: Secret Id of (base-64 encoded unencrypted pfx) 'Secret' or
'Certificate' object stored in KeyVault.
:type key_vault_secret_id: str
:ivar provisioning_state: The provisioning state of the trusted root certificate resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'data': {'key': 'properties.data', 'type': 'str'},
'key_vault_secret_id': {'key': 'properties.keyVaultSecretId', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayTrustedRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.data = kwargs.get('data', None)
self.key_vault_secret_id = kwargs.get('key_vault_secret_id', None)
self.provisioning_state = None
class ApplicationGatewayUrlPathMap(SubResource):
"""UrlPathMaps give a url path to the backend mapping information for PathBasedRouting.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the URL path map that is unique within an Application Gateway.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param default_backend_address_pool: Default backend address pool resource of URL path map.
:type default_backend_address_pool: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param default_backend_http_settings: Default backend http settings resource of URL path map.
:type default_backend_http_settings: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param default_rewrite_rule_set: Default Rewrite rule set resource of URL path map.
:type default_rewrite_rule_set: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param default_redirect_configuration: Default redirect configuration resource of URL path map.
:type default_redirect_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param path_rules: Path rule of URL path map resource.
:type path_rules: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayPathRule]
:ivar provisioning_state: The provisioning state of the URL path map resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'default_backend_address_pool': {'key': 'properties.defaultBackendAddressPool', 'type': 'SubResource'},
'default_backend_http_settings': {'key': 'properties.defaultBackendHttpSettings', 'type': 'SubResource'},
'default_rewrite_rule_set': {'key': 'properties.defaultRewriteRuleSet', 'type': 'SubResource'},
'default_redirect_configuration': {'key': 'properties.defaultRedirectConfiguration', 'type': 'SubResource'},
'path_rules': {'key': 'properties.pathRules', 'type': '[ApplicationGatewayPathRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayUrlPathMap, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.default_backend_address_pool = kwargs.get('default_backend_address_pool', None)
self.default_backend_http_settings = kwargs.get('default_backend_http_settings', None)
self.default_rewrite_rule_set = kwargs.get('default_rewrite_rule_set', None)
self.default_redirect_configuration = kwargs.get('default_redirect_configuration', None)
self.path_rules = kwargs.get('path_rules', None)
self.provisioning_state = None
class ApplicationGatewayWebApplicationFirewallConfiguration(msrest.serialization.Model):
"""Application gateway web application firewall configuration.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Whether the web application firewall is enabled or not.
:type enabled: bool
:param firewall_mode: Required. Web application firewall mode. Possible values include:
"Detection", "Prevention".
:type firewall_mode: str or
~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFirewallMode
:param rule_set_type: Required. The type of the web application firewall rule set. Possible
values are: 'OWASP'.
:type rule_set_type: str
:param rule_set_version: Required. The version of the rule set type.
:type rule_set_version: str
:param disabled_rule_groups: The disabled rule groups.
:type disabled_rule_groups:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFirewallDisabledRuleGroup]
:param request_body_check: Whether allow WAF to check request Body.
:type request_body_check: bool
:param max_request_body_size: Maximum request body size for WAF.
:type max_request_body_size: int
:param max_request_body_size_in_kb: Maximum request body size in Kb for WAF.
:type max_request_body_size_in_kb: int
:param file_upload_limit_in_mb: Maximum file upload size in Mb for WAF.
:type file_upload_limit_in_mb: int
:param exclusions: The exclusion list.
:type exclusions:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayFirewallExclusion]
"""
_validation = {
'enabled': {'required': True},
'firewall_mode': {'required': True},
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
'max_request_body_size': {'maximum': 128, 'minimum': 8},
'max_request_body_size_in_kb': {'maximum': 128, 'minimum': 8},
'file_upload_limit_in_mb': {'minimum': 0},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'firewall_mode': {'key': 'firewallMode', 'type': 'str'},
'rule_set_type': {'key': 'ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'ruleSetVersion', 'type': 'str'},
'disabled_rule_groups': {'key': 'disabledRuleGroups', 'type': '[ApplicationGatewayFirewallDisabledRuleGroup]'},
'request_body_check': {'key': 'requestBodyCheck', 'type': 'bool'},
'max_request_body_size': {'key': 'maxRequestBodySize', 'type': 'int'},
'max_request_body_size_in_kb': {'key': 'maxRequestBodySizeInKb', 'type': 'int'},
'file_upload_limit_in_mb': {'key': 'fileUploadLimitInMb', 'type': 'int'},
'exclusions': {'key': 'exclusions', 'type': '[ApplicationGatewayFirewallExclusion]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationGatewayWebApplicationFirewallConfiguration, self).__init__(**kwargs)
self.enabled = kwargs['enabled']
self.firewall_mode = kwargs['firewall_mode']
self.rule_set_type = kwargs['rule_set_type']
self.rule_set_version = kwargs['rule_set_version']
self.disabled_rule_groups = kwargs.get('disabled_rule_groups', None)
self.request_body_check = kwargs.get('request_body_check', None)
self.max_request_body_size = kwargs.get('max_request_body_size', None)
self.max_request_body_size_in_kb = kwargs.get('max_request_body_size_in_kb', None)
self.file_upload_limit_in_mb = kwargs.get('file_upload_limit_in_mb', None)
self.exclusions = kwargs.get('exclusions', None)
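# Illustrative sketch (editor addition): ApplicationGatewayWebApplicationFirewallConfiguration
# reads 'enabled', 'firewall_mode', 'rule_set_type' and 'rule_set_version' with kwargs[...],
# so omitting any of them raises KeyError at construction time. The rule set values below
# ("OWASP", "3.0") are example strings, not requirements of this module.
def _example_waf_configuration():
    return ApplicationGatewayWebApplicationFirewallConfiguration(
        enabled=True,
        firewall_mode="Prevention",
        rule_set_type="OWASP",
        rule_set_version="3.0",
        request_body_check=True,
        max_request_body_size_in_kb=128,  # the _validation map above bounds this to 8..128
    )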
class FirewallPolicyRuleCondition(msrest.serialization.Model):
"""Properties of a rule.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: ApplicationRuleCondition, NetworkRuleCondition.
All required parameters must be populated in order to send to Azure.
:param name: Name of the rule condition.
:type name: str
:param description: Description of the rule condition.
:type description: str
:param rule_condition_type: Required. Rule Condition Type. Constant filled by server. Possible
values include: "ApplicationRuleCondition", "NetworkRuleCondition".
:type rule_condition_type: str or
~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleConditionType
"""
_validation = {
'rule_condition_type': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'rule_condition_type': {'key': 'ruleConditionType', 'type': 'str'},
}
_subtype_map = {
'rule_condition_type': {'ApplicationRuleCondition': 'ApplicationRuleCondition', 'NetworkRuleCondition': 'NetworkRuleCondition'}
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyRuleCondition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.rule_condition_type = None # type: Optional[str]
class ApplicationRuleCondition(FirewallPolicyRuleCondition):
"""Rule condition of type application.
All required parameters must be populated in order to send to Azure.
:param name: Name of the rule condition.
:type name: str
:param description: Description of the rule condition.
:type description: str
:param rule_condition_type: Required. Rule Condition Type. Constant filled by server. Possible
values include: "ApplicationRuleCondition", "NetworkRuleCondition".
:type rule_condition_type: str or
~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleConditionType
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param destination_addresses: List of destination IP addresses or Service Tags.
:type destination_addresses: list[str]
:param protocols: Array of Application Protocols.
:type protocols:
list[~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleConditionApplicationProtocol]
:param target_fqdns: List of FQDNs for this rule condition.
:type target_fqdns: list[str]
:param fqdn_tags: List of FQDN Tags for this rule condition.
:type fqdn_tags: list[str]
"""
_validation = {
'rule_condition_type': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'rule_condition_type': {'key': 'ruleConditionType', 'type': 'str'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'protocols': {'key': 'protocols', 'type': '[FirewallPolicyRuleConditionApplicationProtocol]'},
'target_fqdns': {'key': 'targetFqdns', 'type': '[str]'},
'fqdn_tags': {'key': 'fqdnTags', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ApplicationRuleCondition, self).__init__(**kwargs)
self.rule_condition_type = 'ApplicationRuleCondition' # type: str
self.source_addresses = kwargs.get('source_addresses', None)
self.destination_addresses = kwargs.get('destination_addresses', None)
self.protocols = kwargs.get('protocols', None)
self.target_fqdns = kwargs.get('target_fqdns', None)
self.fqdn_tags = kwargs.get('fqdn_tags', None)
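# Illustrative sketch (editor addition): the subclass constructor pins rule_condition_type
# to 'ApplicationRuleCondition', which is the discriminator the _subtype_map on
# FirewallPolicyRuleCondition keys on during (de)serialization. Addresses and FQDNs below
# are placeholder values.
def _example_application_rule_condition():
    condition = ApplicationRuleCondition(
        name="allow-web",
        source_addresses=["10.0.0.0/24"],
        target_fqdns=["www.example.com"],
    )
    assert condition.rule_condition_type == 'ApplicationRuleCondition'
    return condition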
class ApplicationSecurityGroup(Resource):
"""An application security group in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar resource_guid: The resource GUID property of the application security group resource. It
uniquely identifies a resource, even if the user changes its name or migrates the resource
across subscriptions or resource groups.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the application security group resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationSecurityGroup, self).__init__(**kwargs)
self.etag = None
self.resource_guid = None
self.provisioning_state = None
class ApplicationSecurityGroupListResult(msrest.serialization.Model):
"""A list of application security groups.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of application security groups.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ApplicationSecurityGroup]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ApplicationSecurityGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ApplicationSecurityGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class AuthorizationListResult(msrest.serialization.Model):
"""Response for ListAuthorizations API service call retrieves all authorizations that belongs to an ExpressRouteCircuit.
:param value: The authorizations in an ExpressRoute Circuit.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitAuthorization]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitAuthorization]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AuthorizationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class AutoApprovedPrivateLinkService(msrest.serialization.Model):
"""The information of an AutoApprovedPrivateLinkService.
:param private_link_service: The id of the private link service resource.
:type private_link_service: str
"""
_attribute_map = {
'private_link_service': {'key': 'privateLinkService', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AutoApprovedPrivateLinkService, self).__init__(**kwargs)
self.private_link_service = kwargs.get('private_link_service', None)
class AutoApprovedPrivateLinkServicesResult(msrest.serialization.Model):
"""An array of private link service id that can be linked to a private end point with auto approved.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: An array of auto-approved private link services.
:type value: list[~azure.mgmt.network.v2019_09_01.models.AutoApprovedPrivateLinkService]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AutoApprovedPrivateLinkService]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AutoApprovedPrivateLinkServicesResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class Availability(msrest.serialization.Model):
"""Availability of the metric.
:param time_grain: The time grain of the availability.
:type time_grain: str
:param retention: The retention of the availability.
:type retention: str
:param blob_duration: Duration of the availability blob.
:type blob_duration: str
"""
_attribute_map = {
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'retention': {'key': 'retention', 'type': 'str'},
'blob_duration': {'key': 'blobDuration', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Availability, self).__init__(**kwargs)
self.time_grain = kwargs.get('time_grain', None)
self.retention = kwargs.get('retention', None)
self.blob_duration = kwargs.get('blob_duration', None)
class AvailableDelegation(msrest.serialization.Model):
"""The serviceName of an AvailableDelegation indicates a possible delegation for a subnet.
:param name: The name of the AvailableDelegation resource.
:type name: str
:param id: A unique identifier of the AvailableDelegation resource.
:type id: str
:param type: Resource type.
:type type: str
:param service_name: The name of the service and resource.
:type service_name: str
:param actions: Describes the actions permitted to the service upon delegation.
:type actions: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'service_name': {'key': 'serviceName', 'type': 'str'},
'actions': {'key': 'actions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AvailableDelegation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.type = kwargs.get('type', None)
self.service_name = kwargs.get('service_name', None)
self.actions = kwargs.get('actions', None)
class AvailableDelegationsResult(msrest.serialization.Model):
"""An array of available delegations.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: An array of available delegations.
:type value: list[~azure.mgmt.network.v2019_09_01.models.AvailableDelegation]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AvailableDelegation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailableDelegationsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class AvailablePrivateEndpointType(msrest.serialization.Model):
"""The information of an AvailablePrivateEndpointType.
:param name: The name of the service and resource.
:type name: str
:param id: A unique identifier of the AvailablePrivateEndpoint Type resource.
:type id: str
:param type: Resource type.
:type type: str
:param resource_name: The name of the service and resource.
:type resource_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'resource_name': {'key': 'resourceName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailablePrivateEndpointType, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.type = kwargs.get('type', None)
self.resource_name = kwargs.get('resource_name', None)
class AvailablePrivateEndpointTypesResult(msrest.serialization.Model):
"""An array of available PrivateEndpoint types.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: An array of available privateEndpoint types.
:type value: list[~azure.mgmt.network.v2019_09_01.models.AvailablePrivateEndpointType]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AvailablePrivateEndpointType]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailablePrivateEndpointTypesResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class AvailableProvidersList(msrest.serialization.Model):
"""List of available countries with details.
All required parameters must be populated in order to send to Azure.
:param countries: Required. List of available countries.
:type countries: list[~azure.mgmt.network.v2019_09_01.models.AvailableProvidersListCountry]
"""
_validation = {
'countries': {'required': True},
}
_attribute_map = {
'countries': {'key': 'countries', 'type': '[AvailableProvidersListCountry]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersList, self).__init__(**kwargs)
self.countries = kwargs['countries']
class AvailableProvidersListCity(msrest.serialization.Model):
"""City or town details.
:param city_name: The city or town name.
:type city_name: str
:param providers: A list of Internet service providers.
:type providers: list[str]
"""
_attribute_map = {
'city_name': {'key': 'cityName', 'type': 'str'},
'providers': {'key': 'providers', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListCity, self).__init__(**kwargs)
self.city_name = kwargs.get('city_name', None)
self.providers = kwargs.get('providers', None)
class AvailableProvidersListCountry(msrest.serialization.Model):
"""Country details.
:param country_name: The country name.
:type country_name: str
:param providers: A list of Internet service providers.
:type providers: list[str]
:param states: List of available states in the country.
:type states: list[~azure.mgmt.network.v2019_09_01.models.AvailableProvidersListState]
"""
_attribute_map = {
'country_name': {'key': 'countryName', 'type': 'str'},
'providers': {'key': 'providers', 'type': '[str]'},
'states': {'key': 'states', 'type': '[AvailableProvidersListState]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListCountry, self).__init__(**kwargs)
self.country_name = kwargs.get('country_name', None)
self.providers = kwargs.get('providers', None)
self.states = kwargs.get('states', None)
class AvailableProvidersListParameters(msrest.serialization.Model):
"""Constraints that determine the list of available Internet service providers.
:param azure_locations: A list of Azure regions.
:type azure_locations: list[str]
:param country: The country for available providers list.
:type country: str
:param state: The state for available providers list.
:type state: str
:param city: The city or town for available providers list.
:type city: str
"""
_attribute_map = {
'azure_locations': {'key': 'azureLocations', 'type': '[str]'},
'country': {'key': 'country', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'city': {'key': 'city', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListParameters, self).__init__(**kwargs)
self.azure_locations = kwargs.get('azure_locations', None)
self.country = kwargs.get('country', None)
self.state = kwargs.get('state', None)
self.city = kwargs.get('city', None)
class AvailableProvidersListState(msrest.serialization.Model):
"""State details.
:param state_name: The state name.
:type state_name: str
:param providers: A list of Internet service providers.
:type providers: list[str]
:param cities: List of available cities or towns in the state.
:type cities: list[~azure.mgmt.network.v2019_09_01.models.AvailableProvidersListCity]
"""
_attribute_map = {
'state_name': {'key': 'stateName', 'type': 'str'},
'providers': {'key': 'providers', 'type': '[str]'},
'cities': {'key': 'cities', 'type': '[AvailableProvidersListCity]'},
}
def __init__(
self,
**kwargs
):
super(AvailableProvidersListState, self).__init__(**kwargs)
self.state_name = kwargs.get('state_name', None)
self.providers = kwargs.get('providers', None)
self.cities = kwargs.get('cities', None)
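# Illustrative sketch (editor addition): the country/state/city models nest into one
# another, mirroring the aggregation levels of the availability report. Geography and
# provider names are placeholders.
def _example_available_providers_country():
    return AvailableProvidersListCountry(
        country_name="United States",
        providers=["Comcast"],
        states=[
            AvailableProvidersListState(
                state_name="washington",
                cities=[
                    AvailableProvidersListCity(city_name="seattle", providers=["Comcast"]),
                ],
            ),
        ],
    )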
class AvailableServiceAlias(msrest.serialization.Model):
"""The available service alias.
:param name: The name of the service alias.
:type name: str
:param id: The ID of the service alias.
:type id: str
:param type: The type of the resource.
:type type: str
:param resource_name: The resource name of the service alias.
:type resource_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'resource_name': {'key': 'resourceName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailableServiceAlias, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.type = kwargs.get('type', None)
self.resource_name = kwargs.get('resource_name', None)
class AvailableServiceAliasesResult(msrest.serialization.Model):
"""An array of available service aliases.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: An array of available service aliases.
:type value: list[~azure.mgmt.network.v2019_09_01.models.AvailableServiceAlias]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AvailableServiceAlias]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AvailableServiceAliasesResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class AzureAsyncOperationResult(msrest.serialization.Model):
"""The response body contains the status of the specified asynchronous operation, indicating whether it has succeeded, is in progress, or has failed. Note that this status is distinct from the HTTP status code returned for the Get Operation Status operation itself. If the asynchronous operation succeeded, the response body includes the HTTP status code for the successful request. If the asynchronous operation failed, the response body includes the HTTP status code for the failed request and error information regarding the failure.
:param status: Status of the Azure async operation. Possible values include: "InProgress",
"Succeeded", "Failed".
:type status: str or ~azure.mgmt.network.v2019_09_01.models.NetworkOperationStatus
:param error: Details of the error occurred during specified asynchronous operation.
:type error: ~azure.mgmt.network.v2019_09_01.models.Error
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'error': {'key': 'error', 'type': 'Error'},
}
def __init__(
self,
**kwargs
):
super(AzureAsyncOperationResult, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.error = kwargs.get('error', None)
class AzureFirewall(Resource):
"""Azure Firewall resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param zones: A list of availability zones denoting where the resource needs to come from.
:type zones: list[str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param application_rule_collections: Collection of application rule collections used by Azure
Firewall.
:type application_rule_collections:
list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallApplicationRuleCollection]
:param nat_rule_collections: Collection of NAT rule collections used by Azure Firewall.
:type nat_rule_collections:
list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallNatRuleCollection]
:param network_rule_collections: Collection of network rule collections used by Azure Firewall.
:type network_rule_collections:
list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallNetworkRuleCollection]
:param ip_configurations: IP configuration of the Azure Firewall resource.
:type ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallIPConfiguration]
:ivar provisioning_state: The provisioning state of the Azure firewall resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param threat_intel_mode: The operation mode for Threat Intelligence. Possible values include:
"Alert", "Deny", "Off".
:type threat_intel_mode: str or
~azure.mgmt.network.v2019_09_01.models.AzureFirewallThreatIntelMode
:param virtual_hub: The virtualHub to which the firewall belongs.
:type virtual_hub: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param firewall_policy: The firewallPolicy associated with this azure firewall.
:type firewall_policy: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar hub_ip_addresses: IP addresses associated with AzureFirewall.
:vartype hub_ip_addresses: ~azure.mgmt.network.v2019_09_01.models.HubIPAddresses
:param sku: The Azure Firewall Resource SKU.
:type sku: ~azure.mgmt.network.v2019_09_01.models.AzureFirewallSku
:param additional_properties: The additional properties used to further configure this azure
firewall.
:type additional_properties: dict[str, str]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'hub_ip_addresses': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'zones': {'key': 'zones', 'type': '[str]'},
'etag': {'key': 'etag', 'type': 'str'},
'application_rule_collections': {'key': 'properties.applicationRuleCollections', 'type': '[AzureFirewallApplicationRuleCollection]'},
'nat_rule_collections': {'key': 'properties.natRuleCollections', 'type': '[AzureFirewallNatRuleCollection]'},
'network_rule_collections': {'key': 'properties.networkRuleCollections', 'type': '[AzureFirewallNetworkRuleCollection]'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[AzureFirewallIPConfiguration]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'threat_intel_mode': {'key': 'properties.threatIntelMode', 'type': 'str'},
'virtual_hub': {'key': 'properties.virtualHub', 'type': 'SubResource'},
'firewall_policy': {'key': 'properties.firewallPolicy', 'type': 'SubResource'},
'hub_ip_addresses': {'key': 'properties.hubIpAddresses', 'type': 'HubIPAddresses'},
'sku': {'key': 'properties.sku', 'type': 'AzureFirewallSku'},
'additional_properties': {'key': 'properties.additionalProperties', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewall, self).__init__(**kwargs)
self.zones = kwargs.get('zones', None)
self.etag = None
self.application_rule_collections = kwargs.get('application_rule_collections', None)
self.nat_rule_collections = kwargs.get('nat_rule_collections', None)
self.network_rule_collections = kwargs.get('network_rule_collections', None)
self.ip_configurations = kwargs.get('ip_configurations', None)
self.provisioning_state = None
self.threat_intel_mode = kwargs.get('threat_intel_mode', None)
self.virtual_hub = kwargs.get('virtual_hub', None)
self.firewall_policy = kwargs.get('firewall_policy', None)
self.hub_ip_addresses = None
self.sku = kwargs.get('sku', None)
self.additional_properties = kwargs.get('additional_properties', None)
class AzureFirewallApplicationRule(msrest.serialization.Model):
"""Properties of an application rule.
:param name: Name of the application rule.
:type name: str
:param description: Description of the rule.
:type description: str
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param protocols: Array of ApplicationRuleProtocols.
:type protocols:
list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallApplicationRuleProtocol]
:param target_fqdns: List of FQDNs for this rule.
:type target_fqdns: list[str]
:param fqdn_tags: List of FQDN Tags for this rule.
:type fqdn_tags: list[str]
:param source_ip_groups: List of source IpGroups for this rule.
:type source_ip_groups: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'protocols': {'key': 'protocols', 'type': '[AzureFirewallApplicationRuleProtocol]'},
'target_fqdns': {'key': 'targetFqdns', 'type': '[str]'},
'fqdn_tags': {'key': 'fqdnTags', 'type': '[str]'},
'source_ip_groups': {'key': 'sourceIpGroups', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallApplicationRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.source_addresses = kwargs.get('source_addresses', None)
self.protocols = kwargs.get('protocols', None)
self.target_fqdns = kwargs.get('target_fqdns', None)
self.fqdn_tags = kwargs.get('fqdn_tags', None)
self.source_ip_groups = kwargs.get('source_ip_groups', None)
class AzureFirewallApplicationRuleCollection(SubResource):
"""Application rule collection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the Azure firewall. This name can
be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Priority of the application rule collection resource.
:type priority: int
:param action: The action type of a rule collection.
:type action: ~azure.mgmt.network.v2019_09_01.models.AzureFirewallRCAction
:param rules: Collection of rules used by an application rule collection.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallApplicationRule]
:ivar provisioning_state: The provisioning state of the application rule collection resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'priority': {'maximum': 65000, 'minimum': 100},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'action': {'key': 'properties.action', 'type': 'AzureFirewallRCAction'},
'rules': {'key': 'properties.rules', 'type': '[AzureFirewallApplicationRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallApplicationRuleCollection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs.get('priority', None)
self.action = kwargs.get('action', None)
self.rules = kwargs.get('rules', None)
self.provisioning_state = None
class AzureFirewallApplicationRuleProtocol(msrest.serialization.Model):
"""Properties of the application rule protocol.
:param protocol_type: Protocol type. Possible values include: "Http", "Https", "Mssql".
:type protocol_type: str or
~azure.mgmt.network.v2019_09_01.models.AzureFirewallApplicationRuleProtocolType
:param port: Port number for the protocol, cannot be greater than 64000. This field is
optional.
:type port: int
"""
_validation = {
'port': {'maximum': 64000, 'minimum': 0},
}
_attribute_map = {
'protocol_type': {'key': 'protocolType', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallApplicationRuleProtocol, self).__init__(**kwargs)
self.protocol_type = kwargs.get('protocol_type', None)
self.port = kwargs.get('port', None)
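# Illustrative sketch (editor addition): the 'port' bounds in _validation (0..64000) are
# enforced by the msrest serializer rather than in __init__, so an out-of-range port is
# only rejected when client-side validation runs. serialize() is assumed to be the
# standard msrest.serialization.Model helper, which emits the _attribute_map keys.
def _example_application_rule_protocol_body():
    protocol = AzureFirewallApplicationRuleProtocol(protocol_type="Https", port=443)
    return protocol.serialize()  # expected shape: {'protocolType': 'Https', 'port': 443}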
class AzureFirewallFqdnTag(Resource):
"""Azure Firewall FQDN Tag Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the Azure firewall FQDN tag resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:ivar fqdn_tag_name: The name of this FQDN Tag.
:vartype fqdn_tag_name: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'fqdn_tag_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'fqdn_tag_name': {'key': 'properties.fqdnTagName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallFqdnTag, self).__init__(**kwargs)
self.etag = None
self.provisioning_state = None
self.fqdn_tag_name = None
class AzureFirewallFqdnTagListResult(msrest.serialization.Model):
"""Response for ListAzureFirewallFqdnTags API service call.
:param value: List of Azure Firewall FQDN Tags in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallFqdnTag]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[AzureFirewallFqdnTag]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallFqdnTagListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class AzureFirewallIPConfiguration(SubResource):
"""IP configuration of an Azure Firewall.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar private_ip_address: The Firewall Internal Load Balancer IP to be used as the next hop in
User Defined Routes.
:vartype private_ip_address: str
:param subnet: Reference of the subnet resource. This resource must be named
'AzureFirewallSubnet'.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param public_ip_address: Reference of the PublicIP resource. This field is a mandatory input
if subnet is not null.
:type public_ip_address: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the Azure firewall IP configuration
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'private_ip_address': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.private_ip_address = None
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
class AzureFirewallListResult(msrest.serialization.Model):
"""Response for ListAzureFirewalls API service call.
:param value: List of Azure Firewalls in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.AzureFirewall]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[AzureFirewall]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class AzureFirewallNatRCAction(msrest.serialization.Model):
"""AzureFirewall NAT Rule Collection Action.
:param type: The type of action. Possible values include: "Snat", "Dnat".
:type type: str or ~azure.mgmt.network.v2019_09_01.models.AzureFirewallNatRCActionType
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNatRCAction, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
class AzureFirewallNatRule(msrest.serialization.Model):
"""Properties of a NAT rule.
:param name: Name of the NAT rule.
:type name: str
:param description: Description of the rule.
:type description: str
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param destination_addresses: List of destination IP addresses for this rule. Supports IP
ranges, prefixes, and service tags.
:type destination_addresses: list[str]
:param destination_ports: List of destination ports.
:type destination_ports: list[str]
:param protocols: Array of AzureFirewallNetworkRuleProtocols applicable to this NAT rule.
:type protocols: list[str or
~azure.mgmt.network.v2019_09_01.models.AzureFirewallNetworkRuleProtocol]
:param translated_address: The translated address for this NAT rule.
:type translated_address: str
:param translated_port: The translated port for this NAT rule.
:type translated_port: str
:param translated_fqdn: The translated FQDN for this NAT rule.
:type translated_fqdn: str
:param source_ip_groups: List of source IpGroups for this rule.
:type source_ip_groups: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'destination_ports': {'key': 'destinationPorts', 'type': '[str]'},
'protocols': {'key': 'protocols', 'type': '[str]'},
'translated_address': {'key': 'translatedAddress', 'type': 'str'},
'translated_port': {'key': 'translatedPort', 'type': 'str'},
'translated_fqdn': {'key': 'translatedFqdn', 'type': 'str'},
'source_ip_groups': {'key': 'sourceIpGroups', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNatRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.source_addresses = kwargs.get('source_addresses', None)
self.destination_addresses = kwargs.get('destination_addresses', None)
self.destination_ports = kwargs.get('destination_ports', None)
self.protocols = kwargs.get('protocols', None)
self.translated_address = kwargs.get('translated_address', None)
self.translated_port = kwargs.get('translated_port', None)
self.translated_fqdn = kwargs.get('translated_fqdn', None)
self.source_ip_groups = kwargs.get('source_ip_groups', None)
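# Illustrative sketch (editor addition): a DNAT-style rule with placeholder addresses and
# ports. Protocols are passed as plain strings because the _attribute_map above serializes
# them as '[str]'.
def _example_nat_rule():
    return AzureFirewallNatRule(
        name="dnat-web",
        source_addresses=["*"],
        destination_addresses=["203.0.113.10"],  # example firewall public IP
        destination_ports=["443"],
        protocols=["TCP"],
        translated_address="10.0.1.4",
        translated_port="8443",
    )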
class AzureFirewallNatRuleCollection(SubResource):
"""NAT rule collection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the Azure firewall. This name can
be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Priority of the NAT rule collection resource.
:type priority: int
:param action: The action type of a NAT rule collection.
:type action: ~azure.mgmt.network.v2019_09_01.models.AzureFirewallNatRCAction
:param rules: Collection of rules used by a NAT rule collection.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallNatRule]
:ivar provisioning_state: The provisioning state of the NAT rule collection resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'priority': {'maximum': 65000, 'minimum': 100},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'action': {'key': 'properties.action', 'type': 'AzureFirewallNatRCAction'},
'rules': {'key': 'properties.rules', 'type': '[AzureFirewallNatRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNatRuleCollection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs.get('priority', None)
self.action = kwargs.get('action', None)
self.rules = kwargs.get('rules', None)
self.provisioning_state = None
class AzureFirewallNetworkRule(msrest.serialization.Model):
"""Properties of the network rule.
:param name: Name of the network rule.
:type name: str
:param description: Description of the rule.
:type description: str
:param protocols: Array of AzureFirewallNetworkRuleProtocols.
:type protocols: list[str or
~azure.mgmt.network.v2019_09_01.models.AzureFirewallNetworkRuleProtocol]
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param destination_addresses: List of destination IP addresses.
:type destination_addresses: list[str]
:param destination_ports: List of destination ports.
:type destination_ports: list[str]
:param destination_fqdns: List of destination FQDNs.
:type destination_fqdns: list[str]
:param source_ip_groups: List of source IpGroups for this rule.
:type source_ip_groups: list[str]
:param destination_ip_groups: List of destination IpGroups for this rule.
:type destination_ip_groups: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'protocols': {'key': 'protocols', 'type': '[str]'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'destination_ports': {'key': 'destinationPorts', 'type': '[str]'},
'destination_fqdns': {'key': 'destinationFqdns', 'type': '[str]'},
'source_ip_groups': {'key': 'sourceIpGroups', 'type': '[str]'},
'destination_ip_groups': {'key': 'destinationIpGroups', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNetworkRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.description = kwargs.get('description', None)
self.protocols = kwargs.get('protocols', None)
self.source_addresses = kwargs.get('source_addresses', None)
self.destination_addresses = kwargs.get('destination_addresses', None)
self.destination_ports = kwargs.get('destination_ports', None)
self.destination_fqdns = kwargs.get('destination_fqdns', None)
self.source_ip_groups = kwargs.get('source_ip_groups', None)
self.destination_ip_groups = kwargs.get('destination_ip_groups', None)
class AzureFirewallNetworkRuleCollection(SubResource):
"""Network rule collection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the Azure firewall. This name can
be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Priority of the network rule collection resource.
:type priority: int
:param action: The action type of a rule collection.
:type action: ~azure.mgmt.network.v2019_09_01.models.AzureFirewallRCAction
:param rules: Collection of rules used by a network rule collection.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallNetworkRule]
:ivar provisioning_state: The provisioning state of the network rule collection resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'priority': {'maximum': 65000, 'minimum': 100},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'action': {'key': 'properties.action', 'type': 'AzureFirewallRCAction'},
'rules': {'key': 'properties.rules', 'type': '[AzureFirewallNetworkRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallNetworkRuleCollection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs.get('priority', None)
self.action = kwargs.get('action', None)
self.rules = kwargs.get('rules', None)
self.provisioning_state = None
class AzureFirewallPublicIPAddress(msrest.serialization.Model):
"""Public IP Address associated with azure firewall.
:param address: Public IP Address value.
:type address: str
"""
_attribute_map = {
'address': {'key': 'address', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallPublicIPAddress, self).__init__(**kwargs)
self.address = kwargs.get('address', None)
class AzureFirewallRCAction(msrest.serialization.Model):
"""Properties of the AzureFirewallRCAction.
:param type: The type of action. Possible values include: "Allow", "Deny".
:type type: str or ~azure.mgmt.network.v2019_09_01.models.AzureFirewallRCActionType
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallRCAction, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
class AzureFirewallSku(msrest.serialization.Model):
"""SKU of an Azure Firewall.
:param name: Name of an Azure Firewall SKU. Possible values include: "AZFW_VNet", "AZFW_Hub".
:type name: str or ~azure.mgmt.network.v2019_09_01.models.AzureFirewallSkuName
:param tier: Tier of an Azure Firewall. Possible values include: "Standard".
:type tier: str or ~azure.mgmt.network.v2019_09_01.models.AzureFirewallSkuTier
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureFirewallSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
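# Illustrative sketch (editor addition): how the Azure Firewall models defined above
# compose. Names, region, and priority are hypothetical; the priority must fall in the
# 100..65000 range declared in the rule collection's _validation map. Server-populated
# members (etag, provisioning_state, hub_ip_addresses) stay None on the request side.
def _example_azure_firewall():
    return AzureFirewall(
        location="westus",
        threat_intel_mode="Alert",
        sku=AzureFirewallSku(name="AZFW_VNet", tier="Standard"),
        application_rule_collections=[
            AzureFirewallApplicationRuleCollection(
                name="app-rc-1",
                priority=100,
                action=AzureFirewallRCAction(type="Allow"),
                rules=[
                    AzureFirewallApplicationRule(
                        name="allow-docs",
                        source_addresses=["10.0.0.0/24"],
                        target_fqdns=["docs.microsoft.com"],
                        protocols=[
                            AzureFirewallApplicationRuleProtocol(protocol_type="Https", port=443),
                        ],
                    ),
                ],
            ),
        ],
    )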
class AzureReachabilityReport(msrest.serialization.Model):
"""Azure reachability report details.
All required parameters must be populated in order to send to Azure.
:param aggregation_level: Required. The aggregation level of Azure reachability report. Can be
Country, State or City.
:type aggregation_level: str
:param provider_location: Required. Parameters that define a geographic location.
:type provider_location: ~azure.mgmt.network.v2019_09_01.models.AzureReachabilityReportLocation
:param reachability_report: Required. List of Azure reachability report items.
:type reachability_report:
list[~azure.mgmt.network.v2019_09_01.models.AzureReachabilityReportItem]
"""
_validation = {
'aggregation_level': {'required': True},
'provider_location': {'required': True},
'reachability_report': {'required': True},
}
_attribute_map = {
'aggregation_level': {'key': 'aggregationLevel', 'type': 'str'},
'provider_location': {'key': 'providerLocation', 'type': 'AzureReachabilityReportLocation'},
'reachability_report': {'key': 'reachabilityReport', 'type': '[AzureReachabilityReportItem]'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReport, self).__init__(**kwargs)
self.aggregation_level = kwargs['aggregation_level']
self.provider_location = kwargs['provider_location']
self.reachability_report = kwargs['reachability_report']
class AzureReachabilityReportItem(msrest.serialization.Model):
"""Azure reachability report details for a given provider location.
:param provider: The Internet service provider.
:type provider: str
:param azure_location: The Azure region.
:type azure_location: str
:param latencies: List of latency details for each of the time series.
:type latencies:
list[~azure.mgmt.network.v2019_09_01.models.AzureReachabilityReportLatencyInfo]
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'azure_location': {'key': 'azureLocation', 'type': 'str'},
'latencies': {'key': 'latencies', 'type': '[AzureReachabilityReportLatencyInfo]'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportItem, self).__init__(**kwargs)
self.provider = kwargs.get('provider', None)
self.azure_location = kwargs.get('azure_location', None)
self.latencies = kwargs.get('latencies', None)
class AzureReachabilityReportLatencyInfo(msrest.serialization.Model):
"""Details on latency for a time series.
:param time_stamp: The time stamp.
:type time_stamp: ~datetime.datetime
:param score: The relative latency score between 1 and 100, with higher values indicating a faster
connection.
:type score: int
"""
_validation = {
'score': {'maximum': 100, 'minimum': 1},
}
_attribute_map = {
'time_stamp': {'key': 'timeStamp', 'type': 'iso-8601'},
'score': {'key': 'score', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportLatencyInfo, self).__init__(**kwargs)
self.time_stamp = kwargs.get('time_stamp', None)
self.score = kwargs.get('score', None)
class AzureReachabilityReportLocation(msrest.serialization.Model):
"""Parameters that define a geographic location.
All required parameters must be populated in order to send to Azure.
:param country: Required. The name of the country.
:type country: str
:param state: The name of the state.
:type state: str
:param city: The name of the city or town.
:type city: str
"""
_validation = {
'country': {'required': True},
}
_attribute_map = {
'country': {'key': 'country', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'city': {'key': 'city', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportLocation, self).__init__(**kwargs)
self.country = kwargs['country']
self.state = kwargs.get('state', None)
self.city = kwargs.get('city', None)
class AzureReachabilityReportParameters(msrest.serialization.Model):
"""Geographic and time constraints for Azure reachability report.
All required parameters must be populated in order to send to Azure.
:param provider_location: Required. Parameters that define a geographic location.
:type provider_location: ~azure.mgmt.network.v2019_09_01.models.AzureReachabilityReportLocation
:param providers: List of Internet service providers.
:type providers: list[str]
:param azure_locations: Optional Azure regions to scope the query to.
:type azure_locations: list[str]
:param start_time: Required. The start time for the Azure reachability report.
:type start_time: ~datetime.datetime
:param end_time: Required. The end time for the Azure reachability report.
:type end_time: ~datetime.datetime
"""
_validation = {
'provider_location': {'required': True},
'start_time': {'required': True},
'end_time': {'required': True},
}
_attribute_map = {
'provider_location': {'key': 'providerLocation', 'type': 'AzureReachabilityReportLocation'},
'providers': {'key': 'providers', 'type': '[str]'},
'azure_locations': {'key': 'azureLocations', 'type': '[str]'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(
self,
**kwargs
):
super(AzureReachabilityReportParameters, self).__init__(**kwargs)
self.provider_location = kwargs['provider_location']
self.providers = kwargs.get('providers', None)
self.azure_locations = kwargs.get('azure_locations', None)
self.start_time = kwargs['start_time']
self.end_time = kwargs['end_time']
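# Illustrative sketch (editor addition): provider_location, start_time and end_time are
# mandatory (kwargs[...] lookups above). The one-day window, provider, and location are
# placeholder values.
def _example_reachability_report_parameters():
    import datetime
    location = AzureReachabilityReportLocation(country="United States", state="washington")
    return AzureReachabilityReportParameters(
        provider_location=location,
        providers=["Comcast"],
        start_time=datetime.datetime(2019, 9, 1),
        end_time=datetime.datetime(2019, 9, 2),
    )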
class BackendAddressPool(SubResource):
"""Pool of backend IP addresses.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of backend address pools
used by the load balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:ivar backend_ip_configurations: An array of references to IP addresses defined in network
interfaces.
:vartype backend_ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfiguration]
:ivar load_balancing_rules: An array of references to load balancing rules that use this
backend address pool.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar outbound_rule: A reference to an outbound rule that uses this backend address pool.
:vartype outbound_rule: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar outbound_rules: An array of references to outbound rules that use this backend address
pool.
:vartype outbound_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the backend address pool resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'backend_ip_configurations': {'readonly': True},
'load_balancing_rules': {'readonly': True},
'outbound_rule': {'readonly': True},
'outbound_rules': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'},
'outbound_rule': {'key': 'properties.outboundRule', 'type': 'SubResource'},
'outbound_rules': {'key': 'properties.outboundRules', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BackendAddressPool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.backend_ip_configurations = None
self.load_balancing_rules = None
self.outbound_rule = None
self.outbound_rules = None
self.provisioning_state = None
class BastionActiveSession(msrest.serialization.Model):
"""The session detail for a target.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar session_id: A unique id for the session.
:vartype session_id: str
:ivar start_time: The time when the session started.
:vartype start_time: object
:ivar target_subscription_id: The subscription id for the target virtual machine.
:vartype target_subscription_id: str
:ivar resource_type: The type of the resource.
:vartype resource_type: str
:ivar target_host_name: The host name of the target.
:vartype target_host_name: str
:ivar target_resource_group: The resource group of the target.
:vartype target_resource_group: str
:ivar user_name: The user name who is active on this session.
:vartype user_name: str
:ivar target_ip_address: The IP Address of the target.
:vartype target_ip_address: str
:ivar protocol: The protocol used to connect to the target. Possible values include: "SSH",
"RDP".
:vartype protocol: str or ~azure.mgmt.network.v2019_09_01.models.BastionConnectProtocol
:ivar target_resource_id: The resource id of the target.
:vartype target_resource_id: str
:ivar session_duration_in_mins: Duration in mins the session has been active.
:vartype session_duration_in_mins: float
"""
_validation = {
'session_id': {'readonly': True},
'start_time': {'readonly': True},
'target_subscription_id': {'readonly': True},
'resource_type': {'readonly': True},
'target_host_name': {'readonly': True},
'target_resource_group': {'readonly': True},
'user_name': {'readonly': True},
'target_ip_address': {'readonly': True},
'protocol': {'readonly': True},
'target_resource_id': {'readonly': True},
'session_duration_in_mins': {'readonly': True},
}
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'object'},
'target_subscription_id': {'key': 'targetSubscriptionId', 'type': 'str'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'target_host_name': {'key': 'targetHostName', 'type': 'str'},
'target_resource_group': {'key': 'targetResourceGroup', 'type': 'str'},
'user_name': {'key': 'userName', 'type': 'str'},
'target_ip_address': {'key': 'targetIpAddress', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'session_duration_in_mins': {'key': 'sessionDurationInMins', 'type': 'float'},
}
def __init__(
self,
**kwargs
):
super(BastionActiveSession, self).__init__(**kwargs)
self.session_id = None
self.start_time = None
self.target_subscription_id = None
self.resource_type = None
self.target_host_name = None
self.target_resource_group = None
self.user_name = None
self.target_ip_address = None
self.protocol = None
self.target_resource_id = None
self.session_duration_in_mins = None
class BastionActiveSessionListResult(msrest.serialization.Model):
"""Response for GetActiveSessions.
:param value: List of active sessions on the bastion.
:type value: list[~azure.mgmt.network.v2019_09_01.models.BastionActiveSession]
:param next_link: Gets or sets the URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BastionActiveSession]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionActiveSessionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class BastionHost(Resource):
"""Bastion Host resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param ip_configurations: IP configuration of the Bastion Host resource.
:type ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.BastionHostIPConfiguration]
:param dns_name: FQDN for the endpoint on which the bastion host is accessible.
:type dns_name: str
:ivar provisioning_state: The provisioning state of the bastion host resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[BastionHostIPConfiguration]'},
'dns_name': {'key': 'properties.dnsName', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionHost, self).__init__(**kwargs)
self.etag = None
self.ip_configurations = kwargs.get('ip_configurations', None)
self.dns_name = kwargs.get('dns_name', None)
self.provisioning_state = None
class BastionHostIPConfiguration(SubResource):
"""IP configuration of an Bastion Host.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Ip configuration type.
:vartype type: str
:param subnet: Reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param public_ip_address: Reference of the PublicIP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the bastion host IP configuration resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param private_ip_allocation_method: Private IP allocation method. Possible values include:
"Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionHostIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
class BastionHostListResult(msrest.serialization.Model):
"""Response for ListBastionHosts API service call.
:param value: List of Bastion Hosts in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.BastionHost]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BastionHost]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionHostListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class BastionSessionDeleteResult(msrest.serialization.Model):
"""Response for DisconnectActiveSessions.
:param value: List of sessions with their corresponding state.
:type value: list[~azure.mgmt.network.v2019_09_01.models.BastionSessionState]
:param next_link: Gets or sets the URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BastionSessionState]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionSessionDeleteResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class BastionSessionState(msrest.serialization.Model):
"""The session state detail for a target.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar session_id: A unique id for the session.
:vartype session_id: str
:ivar message: Used for extra information.
:vartype message: str
:ivar state: The state of the session. Disconnected/Failed/NotFound.
:vartype state: str
"""
_validation = {
'session_id': {'readonly': True},
'message': {'readonly': True},
'state': {'readonly': True},
}
_attribute_map = {
'session_id': {'key': 'sessionId', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionSessionState, self).__init__(**kwargs)
self.session_id = None
self.message = None
self.state = None
class BastionShareableLink(msrest.serialization.Model):
"""Bastion Shareable Link.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param vm: Required. Reference of the virtual machine resource.
:type vm: ~azure.mgmt.network.v2019_09_01.models.Resource
:ivar bsl: The unique Bastion Shareable Link to the virtual machine.
:vartype bsl: str
:ivar created_at: The time when the link was created.
:vartype created_at: str
:ivar message: Optional field indicating the warning or error message related to the vm in case
of partial failure.
:vartype message: str
"""
_validation = {
'vm': {'required': True},
'bsl': {'readonly': True},
'created_at': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'vm': {'key': 'vm', 'type': 'Resource'},
'bsl': {'key': 'bsl', 'type': 'str'},
'created_at': {'key': 'createdAt', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionShareableLink, self).__init__(**kwargs)
self.vm = kwargs['vm']
self.bsl = None
self.created_at = None
self.message = None
class BastionShareableLinkListRequest(msrest.serialization.Model):
"""Post request for all the Bastion Shareable Link endpoints.
:param vms: List of VM references.
:type vms: list[~azure.mgmt.network.v2019_09_01.models.BastionShareableLink]
"""
_attribute_map = {
'vms': {'key': 'vms', 'type': '[BastionShareableLink]'},
}
def __init__(
self,
**kwargs
):
super(BastionShareableLinkListRequest, self).__init__(**kwargs)
self.vms = kwargs.get('vms', None)
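# Illustrative sketch: building the POST body shared by the Bastion shareable
# link endpoints from BastionShareableLink and BastionShareableLinkListRequest.
# Assumes the Resource model defined earlier in this module accepts an ``id``
# keyword; the virtual machine id below is a placeholder.
def _example_bastion_shareable_link_request():
    vm = Resource(id='/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>')
    return BastionShareableLinkListRequest(vms=[BastionShareableLink(vm=vm)])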
class BastionShareableLinkListResult(msrest.serialization.Model):
"""Response for all the Bastion Shareable Link endpoints.
:param value: List of Bastion Shareable Links for the request.
:type value: list[~azure.mgmt.network.v2019_09_01.models.BastionShareableLink]
:param next_link: Gets or sets the URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BastionShareableLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BastionShareableLinkListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class BGPCommunity(msrest.serialization.Model):
"""Contains bgp community information offered in Service Community resources.
:param service_supported_region: The region which the service supports, e.g. for O365 the region
is Global.
:type service_supported_region: str
:param community_name: The name of the bgp community. e.g. Skype.
:type community_name: str
:param community_value: The value of the bgp community. For more information:
https://docs.microsoft.com/en-us/azure/expressroute/expressroute-routing.
:type community_value: str
:param community_prefixes: The prefixes that the bgp community contains.
:type community_prefixes: list[str]
:param is_authorized_to_use: Customer is authorized to use bgp community or not.
:type is_authorized_to_use: bool
:param service_group: The service group that the bgp community belongs to.
:type service_group: str
"""
_attribute_map = {
'service_supported_region': {'key': 'serviceSupportedRegion', 'type': 'str'},
'community_name': {'key': 'communityName', 'type': 'str'},
'community_value': {'key': 'communityValue', 'type': 'str'},
'community_prefixes': {'key': 'communityPrefixes', 'type': '[str]'},
'is_authorized_to_use': {'key': 'isAuthorizedToUse', 'type': 'bool'},
'service_group': {'key': 'serviceGroup', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BGPCommunity, self).__init__(**kwargs)
self.service_supported_region = kwargs.get('service_supported_region', None)
self.community_name = kwargs.get('community_name', None)
self.community_value = kwargs.get('community_value', None)
self.community_prefixes = kwargs.get('community_prefixes', None)
self.is_authorized_to_use = kwargs.get('is_authorized_to_use', None)
self.service_group = kwargs.get('service_group', None)
class BgpPeerStatus(msrest.serialization.Model):
"""BGP peer status details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar local_address: The virtual network gateway's local address.
:vartype local_address: str
:ivar neighbor: The remote BGP peer.
:vartype neighbor: str
:ivar asn: The autonomous system number of the remote BGP peer.
:vartype asn: int
:ivar state: The BGP peer state. Possible values include: "Unknown", "Stopped", "Idle",
"Connecting", "Connected".
:vartype state: str or ~azure.mgmt.network.v2019_09_01.models.BgpPeerState
:ivar connected_duration: For how long the peering has been up.
:vartype connected_duration: str
:ivar routes_received: The number of routes learned from this peer.
:vartype routes_received: long
:ivar messages_sent: The number of BGP messages sent.
:vartype messages_sent: long
:ivar messages_received: The number of BGP messages received.
:vartype messages_received: long
"""
_validation = {
'local_address': {'readonly': True},
'neighbor': {'readonly': True},
'asn': {'readonly': True},
'state': {'readonly': True},
'connected_duration': {'readonly': True},
'routes_received': {'readonly': True},
'messages_sent': {'readonly': True},
'messages_received': {'readonly': True},
}
_attribute_map = {
'local_address': {'key': 'localAddress', 'type': 'str'},
'neighbor': {'key': 'neighbor', 'type': 'str'},
'asn': {'key': 'asn', 'type': 'int'},
'state': {'key': 'state', 'type': 'str'},
'connected_duration': {'key': 'connectedDuration', 'type': 'str'},
'routes_received': {'key': 'routesReceived', 'type': 'long'},
'messages_sent': {'key': 'messagesSent', 'type': 'long'},
'messages_received': {'key': 'messagesReceived', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(BgpPeerStatus, self).__init__(**kwargs)
self.local_address = None
self.neighbor = None
self.asn = None
self.state = None
self.connected_duration = None
self.routes_received = None
self.messages_sent = None
self.messages_received = None
class BgpPeerStatusListResult(msrest.serialization.Model):
"""Response for list BGP peer status API service call.
:param value: List of BGP peers.
:type value: list[~azure.mgmt.network.v2019_09_01.models.BgpPeerStatus]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BgpPeerStatus]'},
}
def __init__(
self,
**kwargs
):
super(BgpPeerStatusListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class BgpServiceCommunity(Resource):
"""Service Community Properties.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param service_name: The name of the bgp community. e.g. Skype.
:type service_name: str
:param bgp_communities: A list of bgp communities.
:type bgp_communities: list[~azure.mgmt.network.v2019_09_01.models.BGPCommunity]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'service_name': {'key': 'properties.serviceName', 'type': 'str'},
'bgp_communities': {'key': 'properties.bgpCommunities', 'type': '[BGPCommunity]'},
}
def __init__(
self,
**kwargs
):
super(BgpServiceCommunity, self).__init__(**kwargs)
self.service_name = kwargs.get('service_name', None)
self.bgp_communities = kwargs.get('bgp_communities', None)
class BgpServiceCommunityListResult(msrest.serialization.Model):
"""Response for the ListServiceCommunity API service call.
:param value: A list of service community resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.BgpServiceCommunity]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[BgpServiceCommunity]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(BgpServiceCommunityListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class BgpSettings(msrest.serialization.Model):
"""BGP settings details.
:param asn: The BGP speaker's ASN.
:type asn: long
:param bgp_peering_address: The BGP peering address and BGP identifier of this BGP speaker.
:type bgp_peering_address: str
:param peer_weight: The weight added to routes learned from this BGP speaker.
:type peer_weight: int
"""
_attribute_map = {
'asn': {'key': 'asn', 'type': 'long'},
'bgp_peering_address': {'key': 'bgpPeeringAddress', 'type': 'str'},
'peer_weight': {'key': 'peerWeight', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(BgpSettings, self).__init__(**kwargs)
self.asn = kwargs.get('asn', None)
self.bgp_peering_address = kwargs.get('bgp_peering_address', None)
self.peer_weight = kwargs.get('peer_weight', None)
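# Illustrative sketch: a typical BgpSettings payload for a virtual network
# gateway. The ASN, peering address and weight are placeholder values, not
# recommendations.
def _example_bgp_settings():
    return BgpSettings(asn=65515, bgp_peering_address='10.0.0.254', peer_weight=0)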
class CheckPrivateLinkServiceVisibilityRequest(msrest.serialization.Model):
"""Request body of the CheckPrivateLinkServiceVisibility API service call.
:param private_link_service_alias: The alias of the private link service.
:type private_link_service_alias: str
"""
_attribute_map = {
'private_link_service_alias': {'key': 'privateLinkServiceAlias', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CheckPrivateLinkServiceVisibilityRequest, self).__init__(**kwargs)
self.private_link_service_alias = kwargs.get('private_link_service_alias', None)
class CloudErrorBody(msrest.serialization.Model):
"""An error response from the service.
:param code: An identifier for the error. Codes are invariant and are intended to be consumed
programmatically.
:type code: str
:param message: A message describing the error, intended to be suitable for display in a user
interface.
:type message: str
:param target: The target of the particular error. For example, the name of the property in
error.
:type target: str
:param details: A list of additional details about the error.
:type details: list[~azure.mgmt.network.v2019_09_01.models.CloudErrorBody]
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[CloudErrorBody]'},
}
def __init__(
self,
**kwargs
):
super(CloudErrorBody, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
class Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model):
"""Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar principal_id: The principal id of user assigned identity.
:vartype principal_id: str
:ivar client_id: The client id of user assigned identity.
:vartype client_id: str
"""
_validation = {
'principal_id': {'readonly': True},
'client_id': {'readonly': True},
}
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'client_id': {'key': 'clientId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs)
self.principal_id = None
self.client_id = None
class ConnectionMonitor(msrest.serialization.Model):
"""Parameters that define the operation to create a connection monitor.
All required parameters must be populated in order to send to Azure.
:param location: Connection monitor location.
:type location: str
:param tags: A set of tags. Connection monitor tags.
:type tags: dict[str, str]
:param source: Required. Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorSource
:param destination: Required. Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'source': {'key': 'properties.source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'properties.destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'properties.autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'properties.monitoringIntervalInSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitor, self).__init__(**kwargs)
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.source = kwargs['source']
self.destination = kwargs['destination']
self.auto_start = kwargs.get('auto_start', True)
self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
class ConnectionMonitorDestination(msrest.serialization.Model):
"""Describes the destination of connection monitor.
:param resource_id: The ID of the resource used as the destination by connection monitor.
:type resource_id: str
:param address: Address of the connection monitor destination (IP or domain name).
:type address: str
:param port: The destination port used by connection monitor.
:type port: int
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorDestination, self).__init__(**kwargs)
self.resource_id = kwargs.get('resource_id', None)
self.address = kwargs.get('address', None)
self.port = kwargs.get('port', None)
class ConnectionMonitorListResult(msrest.serialization.Model):
"""List of connection monitors.
:param value: Information about connection monitors.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorResult]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ConnectionMonitorResult]'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ConnectionMonitorParameters(msrest.serialization.Model):
"""Parameters that define the operation to create a connection monitor.
All required parameters must be populated in order to send to Azure.
:param source: Required. Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorSource
:param destination: Required. Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorParameters, self).__init__(**kwargs)
self.source = kwargs['source']
self.destination = kwargs['destination']
self.auto_start = kwargs.get('auto_start', True)
self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
class ConnectionMonitorQueryResult(msrest.serialization.Model):
"""List of connection states snapshots.
:param source_status: Status of connection monitor source. Possible values include: "Unknown",
"Active", "Inactive".
:type source_status: str or
~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorSourceStatus
:param states: Information about connection states.
:type states: list[~azure.mgmt.network.v2019_09_01.models.ConnectionStateSnapshot]
"""
_attribute_map = {
'source_status': {'key': 'sourceStatus', 'type': 'str'},
'states': {'key': 'states', 'type': '[ConnectionStateSnapshot]'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorQueryResult, self).__init__(**kwargs)
self.source_status = kwargs.get('source_status', None)
self.states = kwargs.get('states', None)
class ConnectionMonitorResult(msrest.serialization.Model):
"""Information about the connection monitor.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Name of the connection monitor.
:vartype name: str
:ivar id: ID of the connection monitor.
:vartype id: str
:param etag: A unique read-only string that changes whenever the resource is updated.
:type etag: str
:ivar type: Connection monitor type.
:vartype type: str
:param location: Connection monitor location.
:type location: str
:param tags: A set of tags. Connection monitor tags.
:type tags: dict[str, str]
:param source: Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorSource
:param destination: Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
:ivar provisioning_state: The provisioning state of the connection monitor. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param start_time: The date and time when the connection monitor was started.
:type start_time: ~datetime.datetime
:param monitoring_status: The monitoring status of the connection monitor.
:type monitoring_status: str
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'source': {'key': 'properties.source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'properties.destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'properties.autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'properties.monitoringIntervalInSeconds', 'type': 'int'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'monitoring_status': {'key': 'properties.monitoringStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorResult, self).__init__(**kwargs)
self.name = None
self.id = None
self.etag = kwargs.get('etag', "A unique read-only string that changes whenever the resource is updated.")
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.source = kwargs.get('source', None)
self.destination = kwargs.get('destination', None)
self.auto_start = kwargs.get('auto_start', True)
self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
self.provisioning_state = None
self.start_time = kwargs.get('start_time', None)
self.monitoring_status = kwargs.get('monitoring_status', None)
class ConnectionMonitorResultProperties(ConnectionMonitorParameters):
"""Describes the properties of a connection monitor.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param source: Required. Describes the source of connection monitor.
:type source: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorSource
:param destination: Required. Describes the destination of connection monitor.
:type destination: ~azure.mgmt.network.v2019_09_01.models.ConnectionMonitorDestination
:param auto_start: Determines if the connection monitor will start automatically once created.
:type auto_start: bool
:param monitoring_interval_in_seconds: Monitoring interval in seconds.
:type monitoring_interval_in_seconds: int
:ivar provisioning_state: The provisioning state of the connection monitor. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param start_time: The date and time when the connection monitor was started.
:type start_time: ~datetime.datetime
:param monitoring_status: The monitoring status of the connection monitor.
:type monitoring_status: str
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ConnectionMonitorSource'},
'destination': {'key': 'destination', 'type': 'ConnectionMonitorDestination'},
'auto_start': {'key': 'autoStart', 'type': 'bool'},
'monitoring_interval_in_seconds': {'key': 'monitoringIntervalInSeconds', 'type': 'int'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'monitoring_status': {'key': 'monitoringStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorResultProperties, self).__init__(**kwargs)
self.provisioning_state = None
self.start_time = kwargs.get('start_time', None)
self.monitoring_status = kwargs.get('monitoring_status', None)
class ConnectionMonitorSource(msrest.serialization.Model):
"""Describes the source of connection monitor.
All required parameters must be populated in order to send to Azure.
:param resource_id: Required. The ID of the resource used as the source by connection monitor.
:type resource_id: str
:param port: The source port used by connection monitor.
:type port: int
"""
_validation = {
'resource_id': {'required': True},
}
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionMonitorSource, self).__init__(**kwargs)
self.resource_id = kwargs['resource_id']
self.port = kwargs.get('port', None)
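# Illustrative sketch: composing a ConnectionMonitor create payload from the
# ConnectionMonitorSource and ConnectionMonitorDestination models above. The
# resource id, address, port and location are hypothetical placeholders;
# auto_start and the 60 second interval fall back to the defaults set in
# ConnectionMonitor.__init__.
def _example_connection_monitor():
    source = ConnectionMonitorSource(
        resource_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>',
    )
    destination = ConnectionMonitorDestination(address='www.example.com', port=443)
    return ConnectionMonitor(location='westus2', source=source, destination=destination)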
class ConnectionResetSharedKey(msrest.serialization.Model):
"""The virtual network connection reset shared key.
All required parameters must be populated in order to send to Azure.
:param key_length: Required. The virtual network connection reset shared key length; it must be
between 1 and 128.
:type key_length: int
"""
_validation = {
'key_length': {'required': True, 'maximum': 128, 'minimum': 1},
}
_attribute_map = {
'key_length': {'key': 'keyLength', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectionResetSharedKey, self).__init__(**kwargs)
self.key_length = kwargs['key_length']
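# Illustrative sketch: requesting a reset of a VPN connection shared key. Per
# the validation map above, key_length must be between 1 and 128; 128 here is
# just an example value.
def _example_connection_reset_shared_key():
    return ConnectionResetSharedKey(key_length=128)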
class ConnectionSharedKey(SubResource):
"""Response for GetConnectionSharedKey API service call.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param value: Required. The virtual network connection shared key value.
:type value: str
"""
_validation = {
'value': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ConnectionSharedKey, self).__init__(**kwargs)
self.value = kwargs['value']
class ConnectionStateSnapshot(msrest.serialization.Model):
"""Connection state snapshot.
Variables are only populated by the server, and will be ignored when sending a request.
:param connection_state: The connection state. Possible values include: "Reachable",
"Unreachable", "Unknown".
:type connection_state: str or ~azure.mgmt.network.v2019_09_01.models.ConnectionState
:param start_time: The start time of the connection snapshot.
:type start_time: ~datetime.datetime
:param end_time: The end time of the connection snapshot.
:type end_time: ~datetime.datetime
:param evaluation_state: Connectivity analysis evaluation state. Possible values include:
"NotStarted", "InProgress", "Completed".
:type evaluation_state: str or ~azure.mgmt.network.v2019_09_01.models.EvaluationState
:param avg_latency_in_ms: Average latency in ms.
:type avg_latency_in_ms: int
:param min_latency_in_ms: Minimum latency in ms.
:type min_latency_in_ms: int
:param max_latency_in_ms: Maximum latency in ms.
:type max_latency_in_ms: int
:param probes_sent: The number of sent probes.
:type probes_sent: int
:param probes_failed: The number of failed probes.
:type probes_failed: int
:ivar hops: List of hops between the source and the destination.
:vartype hops: list[~azure.mgmt.network.v2019_09_01.models.ConnectivityHop]
"""
_validation = {
'hops': {'readonly': True},
}
_attribute_map = {
'connection_state': {'key': 'connectionState', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'evaluation_state': {'key': 'evaluationState', 'type': 'str'},
'avg_latency_in_ms': {'key': 'avgLatencyInMs', 'type': 'int'},
'min_latency_in_ms': {'key': 'minLatencyInMs', 'type': 'int'},
'max_latency_in_ms': {'key': 'maxLatencyInMs', 'type': 'int'},
'probes_sent': {'key': 'probesSent', 'type': 'int'},
'probes_failed': {'key': 'probesFailed', 'type': 'int'},
'hops': {'key': 'hops', 'type': '[ConnectivityHop]'},
}
def __init__(
self,
**kwargs
):
super(ConnectionStateSnapshot, self).__init__(**kwargs)
self.connection_state = kwargs.get('connection_state', None)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.evaluation_state = kwargs.get('evaluation_state', None)
self.avg_latency_in_ms = kwargs.get('avg_latency_in_ms', None)
self.min_latency_in_ms = kwargs.get('min_latency_in_ms', None)
self.max_latency_in_ms = kwargs.get('max_latency_in_ms', None)
self.probes_sent = kwargs.get('probes_sent', None)
self.probes_failed = kwargs.get('probes_failed', None)
self.hops = None
class ConnectivityDestination(msrest.serialization.Model):
"""Parameters that define destination of connection.
:param resource_id: The ID of the resource to which a connection attempt will be made.
:type resource_id: str
:param address: The IP address or URI of the resource to which a connection attempt will be made.
:type address: str
:param port: Port on which check connectivity will be performed.
:type port: int
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityDestination, self).__init__(**kwargs)
self.resource_id = kwargs.get('resource_id', None)
self.address = kwargs.get('address', None)
self.port = kwargs.get('port', None)
class ConnectivityHop(msrest.serialization.Model):
"""Information about a hop between the source and the destination.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The type of the hop.
:vartype type: str
:ivar id: The ID of the hop.
:vartype id: str
:ivar address: The IP address of the hop.
:vartype address: str
:ivar resource_id: The ID of the resource corresponding to this hop.
:vartype resource_id: str
:ivar next_hop_ids: List of next hop identifiers.
:vartype next_hop_ids: list[str]
:ivar issues: List of issues.
:vartype issues: list[~azure.mgmt.network.v2019_09_01.models.ConnectivityIssue]
"""
_validation = {
'type': {'readonly': True},
'id': {'readonly': True},
'address': {'readonly': True},
'resource_id': {'readonly': True},
'next_hop_ids': {'readonly': True},
'issues': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'address': {'key': 'address', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'next_hop_ids': {'key': 'nextHopIds', 'type': '[str]'},
'issues': {'key': 'issues', 'type': '[ConnectivityIssue]'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityHop, self).__init__(**kwargs)
self.type = None
self.id = None
self.address = None
self.resource_id = None
self.next_hop_ids = None
self.issues = None
class ConnectivityInformation(msrest.serialization.Model):
"""Information on the connectivity status.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar hops: List of hops between the source and the destination.
:vartype hops: list[~azure.mgmt.network.v2019_09_01.models.ConnectivityHop]
:ivar connection_status: The connection status. Possible values include: "Unknown",
"Connected", "Disconnected", "Degraded".
:vartype connection_status: str or ~azure.mgmt.network.v2019_09_01.models.ConnectionStatus
:ivar avg_latency_in_ms: Average latency in milliseconds.
:vartype avg_latency_in_ms: int
:ivar min_latency_in_ms: Minimum latency in milliseconds.
:vartype min_latency_in_ms: int
:ivar max_latency_in_ms: Maximum latency in milliseconds.
:vartype max_latency_in_ms: int
:ivar probes_sent: Total number of probes sent.
:vartype probes_sent: int
:ivar probes_failed: Number of failed probes.
:vartype probes_failed: int
"""
_validation = {
'hops': {'readonly': True},
'connection_status': {'readonly': True},
'avg_latency_in_ms': {'readonly': True},
'min_latency_in_ms': {'readonly': True},
'max_latency_in_ms': {'readonly': True},
'probes_sent': {'readonly': True},
'probes_failed': {'readonly': True},
}
_attribute_map = {
'hops': {'key': 'hops', 'type': '[ConnectivityHop]'},
'connection_status': {'key': 'connectionStatus', 'type': 'str'},
'avg_latency_in_ms': {'key': 'avgLatencyInMs', 'type': 'int'},
'min_latency_in_ms': {'key': 'minLatencyInMs', 'type': 'int'},
'max_latency_in_ms': {'key': 'maxLatencyInMs', 'type': 'int'},
'probes_sent': {'key': 'probesSent', 'type': 'int'},
'probes_failed': {'key': 'probesFailed', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityInformation, self).__init__(**kwargs)
self.hops = None
self.connection_status = None
self.avg_latency_in_ms = None
self.min_latency_in_ms = None
self.max_latency_in_ms = None
self.probes_sent = None
self.probes_failed = None
class ConnectivityIssue(msrest.serialization.Model):
"""Information about an issue encountered in the process of checking for connectivity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar origin: The origin of the issue. Possible values include: "Local", "Inbound", "Outbound".
:vartype origin: str or ~azure.mgmt.network.v2019_09_01.models.Origin
:ivar severity: The severity of the issue. Possible values include: "Error", "Warning".
:vartype severity: str or ~azure.mgmt.network.v2019_09_01.models.Severity
:ivar type: The type of issue. Possible values include: "Unknown", "AgentStopped",
"GuestFirewall", "DnsResolution", "SocketBind", "NetworkSecurityRule", "UserDefinedRoute",
"PortThrottled", "Platform".
:vartype type: str or ~azure.mgmt.network.v2019_09_01.models.IssueType
:ivar context: Provides additional context on the issue.
:vartype context: list[dict[str, str]]
"""
_validation = {
'origin': {'readonly': True},
'severity': {'readonly': True},
'type': {'readonly': True},
'context': {'readonly': True},
}
_attribute_map = {
'origin': {'key': 'origin', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'context': {'key': 'context', 'type': '[{str}]'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityIssue, self).__init__(**kwargs)
self.origin = None
self.severity = None
self.type = None
self.context = None
class ConnectivityParameters(msrest.serialization.Model):
"""Parameters that determine how the connectivity check will be performed.
All required parameters must be populated in order to send to Azure.
:param source: Required. Describes the source of the connection.
:type source: ~azure.mgmt.network.v2019_09_01.models.ConnectivitySource
:param destination: Required. Describes the destination of connection.
:type destination: ~azure.mgmt.network.v2019_09_01.models.ConnectivityDestination
:param protocol: Network protocol. Possible values include: "Tcp", "Http", "Https", "Icmp".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.Protocol
:param protocol_configuration: Configuration of the protocol.
:type protocol_configuration: ~azure.mgmt.network.v2019_09_01.models.ProtocolConfiguration
:param preferred_ip_version: Preferred IP version of the connection. Possible values include:
"IPv4", "IPv6".
:type preferred_ip_version: str or ~azure.mgmt.network.v2019_09_01.models.IPVersion
"""
_validation = {
'source': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'source': {'key': 'source', 'type': 'ConnectivitySource'},
'destination': {'key': 'destination', 'type': 'ConnectivityDestination'},
'protocol': {'key': 'protocol', 'type': 'str'},
'protocol_configuration': {'key': 'protocolConfiguration', 'type': 'ProtocolConfiguration'},
'preferred_ip_version': {'key': 'preferredIPVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ConnectivityParameters, self).__init__(**kwargs)
self.source = kwargs['source']
self.destination = kwargs['destination']
self.protocol = kwargs.get('protocol', None)
self.protocol_configuration = kwargs.get('protocol_configuration', None)
self.preferred_ip_version = kwargs.get('preferred_ip_version', None)
class ConnectivitySource(msrest.serialization.Model):
"""Parameters that define the source of the connection.
All required parameters must be populated in order to send to Azure.
:param resource_id: Required. The ID of the resource from which a connectivity check will be
initiated.
:type resource_id: str
:param port: The source port from which a connectivity check will be performed.
:type port: int
"""
_validation = {
'resource_id': {'required': True},
}
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ConnectivitySource, self).__init__(**kwargs)
self.resource_id = kwargs['resource_id']
self.port = kwargs.get('port', None)
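# Illustrative sketch: ConnectivityParameters for a point-to-point check using
# the ConnectivitySource and ConnectivityDestination models above. The source
# VM id and target address are hypothetical placeholders; 'Https' is one of
# the protocol values listed in the ConnectivityParameters docstring.
def _example_connectivity_parameters():
    source = ConnectivitySource(
        resource_id='/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>',
    )
    destination = ConnectivityDestination(address='www.example.com', port=443)
    return ConnectivityParameters(source=source, destination=destination, protocol='Https')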
class Container(SubResource):
"""Reference to container resource in remote resource provider.
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Container, self).__init__(**kwargs)
class ContainerNetworkInterface(SubResource):
"""Container network interface child resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar container_network_interface_configuration: Container network interface configuration from
which this container network interface is created.
:vartype container_network_interface_configuration:
~azure.mgmt.network.v2019_09_01.models.ContainerNetworkInterfaceConfiguration
:param container: Reference to the container to which this container network interface is
attached.
:type container: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar ip_configurations: Reference to the ip configuration on this container nic.
:vartype ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ContainerNetworkInterfaceIpConfiguration]
:ivar provisioning_state: The provisioning state of the container network interface resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'container_network_interface_configuration': {'readonly': True},
'ip_configurations': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'container_network_interface_configuration': {'key': 'properties.containerNetworkInterfaceConfiguration', 'type': 'ContainerNetworkInterfaceConfiguration'},
'container': {'key': 'properties.container', 'type': 'SubResource'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[ContainerNetworkInterfaceIpConfiguration]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerNetworkInterface, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.container_network_interface_configuration = None
self.container = kwargs.get('container', None)
self.ip_configurations = None
self.provisioning_state = None
class ContainerNetworkInterfaceConfiguration(SubResource):
"""Container network interface configuration child resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param ip_configurations: A list of ip configurations of the container network interface
configuration.
:type ip_configurations: list[~azure.mgmt.network.v2019_09_01.models.IPConfigurationProfile]
:param container_network_interfaces: A list of container network interfaces created from this
container network interface configuration.
:type container_network_interfaces: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the container network interface
configuration resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[IPConfigurationProfile]'},
'container_network_interfaces': {'key': 'properties.containerNetworkInterfaces', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerNetworkInterfaceConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.ip_configurations = kwargs.get('ip_configurations', None)
self.container_network_interfaces = kwargs.get('container_network_interfaces', None)
self.provisioning_state = None
class ContainerNetworkInterfaceIpConfiguration(msrest.serialization.Model):
"""The ip configuration for a container network interface.
Variables are only populated by the server, and will be ignored when sending a request.
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the container network interface IP
configuration resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ContainerNetworkInterfaceIpConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.provisioning_state = None
class DdosCustomPolicy(Resource):
"""A DDoS custom policy in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar resource_guid: The resource GUID property of the DDoS custom policy resource. It uniquely
identifies the resource, even if the user changes its name or migrates the resource across
subscriptions or resource groups.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the DDoS custom policy resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:ivar public_ip_addresses: The list of public IPs associated with the DDoS custom policy
resource. This list is read-only.
:vartype public_ip_addresses: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param protocol_custom_settings: The protocol-specific DDoS policy customization parameters.
:type protocol_custom_settings:
list[~azure.mgmt.network.v2019_09_01.models.ProtocolCustomSettingsFormat]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
'public_ip_addresses': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'public_ip_addresses': {'key': 'properties.publicIPAddresses', 'type': '[SubResource]'},
'protocol_custom_settings': {'key': 'properties.protocolCustomSettings', 'type': '[ProtocolCustomSettingsFormat]'},
}
def __init__(
self,
**kwargs
):
super(DdosCustomPolicy, self).__init__(**kwargs)
self.etag = None
self.resource_guid = None
self.provisioning_state = None
self.public_ip_addresses = None
self.protocol_custom_settings = kwargs.get('protocol_custom_settings', None)
class DdosProtectionPlan(msrest.serialization.Model):
"""A DDoS protection plan in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar resource_guid: The resource GUID property of the DDoS protection plan resource. It
uniquely identifies the resource, even if the user changes its name or migrates the resource
across subscriptions or resource groups.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the DDoS protection plan resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:ivar virtual_networks: The list of virtual networks associated with the DDoS protection plan
resource. This list is read-only.
:vartype virtual_networks: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
'virtual_networks': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'virtual_networks': {'key': 'properties.virtualNetworks', 'type': '[SubResource]'},
}
def __init__(
self,
**kwargs
):
super(DdosProtectionPlan, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.location = kwargs.get('location', None)
self.tags = kwargs.get('tags', None)
self.etag = None
self.resource_guid = None
self.provisioning_state = None
self.virtual_networks = None
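
# --- Illustrative usage sketch (not part of the generated models) -----------
# A minimal, hypothetical example of building a DdosProtectionPlan request
# body by hand. Per the constructor above, only location and tags are
# settable through kwargs; read-only fields such as provisioning_state and
# virtual_networks stay None until the service populates them in a response.
#
#   plan = DdosProtectionPlan(
#       location='westus2',        # hypothetical region
#       tags={'env': 'dev'},       # hypothetical tags
#   )
#   assert plan.provisioning_state is None
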
class DdosProtectionPlanListResult(msrest.serialization.Model):
"""A list of DDoS protection plans.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of DDoS protection plans.
:type value: list[~azure.mgmt.network.v2019_09_01.models.DdosProtectionPlan]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[DdosProtectionPlan]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DdosProtectionPlanListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class DdosSettings(msrest.serialization.Model):
"""Contains the DDoS protection settings of the public IP.
:param ddos_custom_policy: The DDoS custom policy associated with the public IP.
:type ddos_custom_policy: ~azure.mgmt.network.v2019_09_01.models.SubResource
    :param protection_coverage: The DDoS protection policy customizability of the public IP. Only
     Standard coverage can be customized. Possible values include: "Basic", "Standard".
:type protection_coverage: str or
~azure.mgmt.network.v2019_09_01.models.DdosSettingsProtectionCoverage
"""
_attribute_map = {
'ddos_custom_policy': {'key': 'ddosCustomPolicy', 'type': 'SubResource'},
'protection_coverage': {'key': 'protectionCoverage', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DdosSettings, self).__init__(**kwargs)
self.ddos_custom_policy = kwargs.get('ddos_custom_policy', None)
self.protection_coverage = kwargs.get('protection_coverage', None)
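
# Illustrative sketch (hypothetical values): attaching a DDoS custom policy to
# a public IP's DDoS settings via a SubResource reference. This assumes the
# SubResource model defined earlier in this module accepts an ``id`` keyword.
#
#   settings = DdosSettings(
#       ddos_custom_policy=SubResource(
#           id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
#              'Microsoft.Network/ddosCustomPolicies/<policy>'),  # hypothetical ID
#       protection_coverage='Standard',
#   )
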
class Delegation(SubResource):
"""Details the service to which the subnet is delegated.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a subnet. This name can be used to
access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
    :param service_name: The name of the service to which the subnet should be delegated (e.g.
Microsoft.Sql/servers).
:type service_name: str
:ivar actions: Describes the actions permitted to the service upon delegation.
:vartype actions: list[str]
:ivar provisioning_state: The provisioning state of the service delegation resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'actions': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'service_name': {'key': 'properties.serviceName', 'type': 'str'},
'actions': {'key': 'properties.actions', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Delegation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.service_name = kwargs.get('service_name', None)
self.actions = None
self.provisioning_state = None
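
# Illustrative sketch: a subnet delegation entry. The delegation name is
# hypothetical; the service name reuses the example from the docstring above.
#
#   delegation = Delegation(
#       name='myDelegation',
#       service_name='Microsoft.Sql/servers',
#   )
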
class DeviceProperties(msrest.serialization.Model):
"""List of properties of the device.
    :param device_vendor: Name of the device vendor.
:type device_vendor: str
:param device_model: Model of the device.
:type device_model: str
:param link_speed_in_mbps: Link speed.
:type link_speed_in_mbps: int
"""
_attribute_map = {
'device_vendor': {'key': 'deviceVendor', 'type': 'str'},
'device_model': {'key': 'deviceModel', 'type': 'str'},
'link_speed_in_mbps': {'key': 'linkSpeedInMbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(DeviceProperties, self).__init__(**kwargs)
self.device_vendor = kwargs.get('device_vendor', None)
self.device_model = kwargs.get('device_model', None)
self.link_speed_in_mbps = kwargs.get('link_speed_in_mbps', None)
class DhcpOptions(msrest.serialization.Model):
"""DhcpOptions contains an array of DNS servers available to VMs deployed in the virtual network. Standard DHCP option for a subnet overrides VNET DHCP options.
:param dns_servers: The list of DNS servers IP addresses.
:type dns_servers: list[str]
"""
_attribute_map = {
'dns_servers': {'key': 'dnsServers', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(DhcpOptions, self).__init__(**kwargs)
self.dns_servers = kwargs.get('dns_servers', None)
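
# Illustrative sketch: custom DNS servers for a virtual network (hypothetical
# addresses). The resulting object is typically assigned to a virtual
# network's dhcp_options property before a create/update call.
#
#   dhcp = DhcpOptions(dns_servers=['10.0.0.4', '10.0.0.5'])
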
class Dimension(msrest.serialization.Model):
"""Dimension of the metric.
:param name: The name of the dimension.
:type name: str
:param display_name: The display name of the dimension.
:type display_name: str
:param internal_name: The internal name of the dimension.
:type internal_name: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'internal_name': {'key': 'internalName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Dimension, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.internal_name = kwargs.get('internal_name', None)
class DnsNameAvailabilityResult(msrest.serialization.Model):
"""Response for the CheckDnsNameAvailability API service call.
:param available: Domain availability (True/False).
:type available: bool
"""
_attribute_map = {
'available': {'key': 'available', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(DnsNameAvailabilityResult, self).__init__(**kwargs)
self.available = kwargs.get('available', None)
class EffectiveNetworkSecurityGroup(msrest.serialization.Model):
"""Effective network security group.
:param network_security_group: The ID of network security group that is applied.
:type network_security_group: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param association: Associated resources.
:type association:
~azure.mgmt.network.v2019_09_01.models.EffectiveNetworkSecurityGroupAssociation
:param effective_security_rules: A collection of effective security rules.
:type effective_security_rules:
list[~azure.mgmt.network.v2019_09_01.models.EffectiveNetworkSecurityRule]
    :param tag_map: Mapping of tags to the list of IP addresses included within the tag.
:type tag_map: str
"""
_attribute_map = {
'network_security_group': {'key': 'networkSecurityGroup', 'type': 'SubResource'},
'association': {'key': 'association', 'type': 'EffectiveNetworkSecurityGroupAssociation'},
'effective_security_rules': {'key': 'effectiveSecurityRules', 'type': '[EffectiveNetworkSecurityRule]'},
'tag_map': {'key': 'tagMap', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityGroup, self).__init__(**kwargs)
self.network_security_group = kwargs.get('network_security_group', None)
self.association = kwargs.get('association', None)
self.effective_security_rules = kwargs.get('effective_security_rules', None)
self.tag_map = kwargs.get('tag_map', None)
class EffectiveNetworkSecurityGroupAssociation(msrest.serialization.Model):
"""The effective network security group association.
:param subnet: The ID of the subnet if assigned.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param network_interface: The ID of the network interface if assigned.
:type network_interface: ~azure.mgmt.network.v2019_09_01.models.SubResource
"""
_attribute_map = {
'subnet': {'key': 'subnet', 'type': 'SubResource'},
'network_interface': {'key': 'networkInterface', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityGroupAssociation, self).__init__(**kwargs)
self.subnet = kwargs.get('subnet', None)
self.network_interface = kwargs.get('network_interface', None)
class EffectiveNetworkSecurityGroupListResult(msrest.serialization.Model):
"""Response for list effective network security groups API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of effective network security groups.
:type value: list[~azure.mgmt.network.v2019_09_01.models.EffectiveNetworkSecurityGroup]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[EffectiveNetworkSecurityGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class EffectiveNetworkSecurityRule(msrest.serialization.Model):
"""Effective network security rules.
:param name: The name of the security rule specified by the user (if created by the user).
:type name: str
:param protocol: The network protocol this rule applies to. Possible values include: "Tcp",
"Udp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.EffectiveSecurityRuleProtocol
:param source_port_range: The source port or range.
:type source_port_range: str
:param destination_port_range: The destination port or range.
:type destination_port_range: str
:param source_port_ranges: The source port ranges. Expected values include a single integer
between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an asterisk (*).
:type source_port_ranges: list[str]
:param destination_port_ranges: The destination port ranges. Expected values include a single
integer between 0 and 65535, a range using '-' as separator (e.g. 100-400), or an asterisk (*).
:type destination_port_ranges: list[str]
:param source_address_prefix: The source address prefix.
:type source_address_prefix: str
:param destination_address_prefix: The destination address prefix.
:type destination_address_prefix: str
:param source_address_prefixes: The source address prefixes. Expected values include CIDR IP
ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System Tags, and the
asterisk (*).
:type source_address_prefixes: list[str]
:param destination_address_prefixes: The destination address prefixes. Expected values include
CIDR IP ranges, Default Tags (VirtualNetwork, AzureLoadBalancer, Internet), System Tags, and
the asterisk (*).
:type destination_address_prefixes: list[str]
:param expanded_source_address_prefix: The expanded source address prefix.
:type expanded_source_address_prefix: list[str]
:param expanded_destination_address_prefix: Expanded destination address prefix.
:type expanded_destination_address_prefix: list[str]
:param access: Whether network traffic is allowed or denied. Possible values include: "Allow",
"Deny".
:type access: str or ~azure.mgmt.network.v2019_09_01.models.SecurityRuleAccess
:param priority: The priority of the rule.
:type priority: int
:param direction: The direction of the rule. Possible values include: "Inbound", "Outbound".
:type direction: str or ~azure.mgmt.network.v2019_09_01.models.SecurityRuleDirection
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'source_port_range': {'key': 'sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'destinationPortRange', 'type': 'str'},
'source_port_ranges': {'key': 'sourcePortRanges', 'type': '[str]'},
'destination_port_ranges': {'key': 'destinationPortRanges', 'type': '[str]'},
'source_address_prefix': {'key': 'sourceAddressPrefix', 'type': 'str'},
'destination_address_prefix': {'key': 'destinationAddressPrefix', 'type': 'str'},
'source_address_prefixes': {'key': 'sourceAddressPrefixes', 'type': '[str]'},
'destination_address_prefixes': {'key': 'destinationAddressPrefixes', 'type': '[str]'},
'expanded_source_address_prefix': {'key': 'expandedSourceAddressPrefix', 'type': '[str]'},
'expanded_destination_address_prefix': {'key': 'expandedDestinationAddressPrefix', 'type': '[str]'},
'access': {'key': 'access', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'direction': {'key': 'direction', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveNetworkSecurityRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.protocol = kwargs.get('protocol', None)
self.source_port_range = kwargs.get('source_port_range', None)
self.destination_port_range = kwargs.get('destination_port_range', None)
self.source_port_ranges = kwargs.get('source_port_ranges', None)
self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
self.source_address_prefix = kwargs.get('source_address_prefix', None)
self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
self.expanded_source_address_prefix = kwargs.get('expanded_source_address_prefix', None)
self.expanded_destination_address_prefix = kwargs.get('expanded_destination_address_prefix', None)
self.access = kwargs.get('access', None)
self.priority = kwargs.get('priority', None)
self.direction = kwargs.get('direction', None)
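
# Illustrative sketch: inspecting an EffectiveNetworkSecurityGroupListResult
# returned by the service (the ``result`` variable is hypothetical) and
# printing the rules that deny traffic.
#
#   for group in result.value or []:
#       for rule in group.effective_security_rules or []:
#           if rule.access == 'Deny':
#               print(rule.name, rule.direction, rule.destination_port_range)
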
class EffectiveRoute(msrest.serialization.Model):
"""Effective Route.
    :param name: The name of the user-defined route. This is optional.
:type name: str
:param disable_bgp_route_propagation: If true, on-premises routes are not propagated to the
network interfaces in the subnet.
:type disable_bgp_route_propagation: bool
:param source: Who created the route. Possible values include: "Unknown", "User",
"VirtualNetworkGateway", "Default".
:type source: str or ~azure.mgmt.network.v2019_09_01.models.EffectiveRouteSource
:param state: The value of effective route. Possible values include: "Active", "Invalid".
:type state: str or ~azure.mgmt.network.v2019_09_01.models.EffectiveRouteState
:param address_prefix: The address prefixes of the effective routes in CIDR notation.
:type address_prefix: list[str]
:param next_hop_ip_address: The IP address of the next hop of the effective route.
:type next_hop_ip_address: list[str]
:param next_hop_type: The type of Azure hop the packet should be sent to. Possible values
include: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", "None".
:type next_hop_type: str or ~azure.mgmt.network.v2019_09_01.models.RouteNextHopType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'disable_bgp_route_propagation': {'key': 'disableBgpRoutePropagation', 'type': 'bool'},
'source': {'key': 'source', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
'address_prefix': {'key': 'addressPrefix', 'type': '[str]'},
'next_hop_ip_address': {'key': 'nextHopIpAddress', 'type': '[str]'},
'next_hop_type': {'key': 'nextHopType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveRoute, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.disable_bgp_route_propagation = kwargs.get('disable_bgp_route_propagation', None)
self.source = kwargs.get('source', None)
self.state = kwargs.get('state', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
self.next_hop_type = kwargs.get('next_hop_type', None)
class EffectiveRouteListResult(msrest.serialization.Model):
"""Response for list effective route API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of effective routes.
:type value: list[~azure.mgmt.network.v2019_09_01.models.EffectiveRoute]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[EffectiveRoute]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EffectiveRouteListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
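
# Illustrative sketch: summarizing an EffectiveRouteListResult returned by the
# service (the ``routes`` variable is hypothetical).
#
#   for route in routes.value or []:
#       print(route.source, route.address_prefix, '->', route.next_hop_type)
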
class EndpointServiceResult(SubResource):
"""Endpoint service.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Name of the endpoint service.
:vartype name: str
:ivar type: Type of the endpoint service.
:vartype type: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EndpointServiceResult, self).__init__(**kwargs)
self.name = None
self.type = None
class EndpointServicesListResult(msrest.serialization.Model):
"""Response for the ListAvailableEndpointServices API service call.
:param value: List of available endpoint services in a region.
:type value: list[~azure.mgmt.network.v2019_09_01.models.EndpointServiceResult]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[EndpointServiceResult]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(EndpointServicesListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class Error(msrest.serialization.Model):
"""Common error representation.
:param code: Error code.
:type code: str
:param message: Error message.
:type message: str
:param target: Error target.
:type target: str
:param details: Error details.
:type details: list[~azure.mgmt.network.v2019_09_01.models.ErrorDetails]
:param inner_error: Inner error message.
:type inner_error: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'details': {'key': 'details', 'type': '[ErrorDetails]'},
'inner_error': {'key': 'innerError', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Error, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
self.target = kwargs.get('target', None)
self.details = kwargs.get('details', None)
self.inner_error = kwargs.get('inner_error', None)
class ErrorDetails(msrest.serialization.Model):
"""Common error details representation.
:param code: Error code.
:type code: str
:param target: Error target.
:type target: str
:param message: Error message.
:type message: str
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ErrorDetails, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.target = kwargs.get('target', None)
self.message = kwargs.get('message', None)
class ErrorResponse(msrest.serialization.Model):
"""The error object.
:param error: The error details object.
:type error: ~azure.mgmt.network.v2019_09_01.models.ErrorDetails
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorDetails'},
}
def __init__(
self,
**kwargs
):
super(ErrorResponse, self).__init__(**kwargs)
self.error = kwargs.get('error', None)
class EvaluatedNetworkSecurityGroup(msrest.serialization.Model):
"""Results of network security group evaluation.
Variables are only populated by the server, and will be ignored when sending a request.
:param network_security_group_id: Network security group ID.
:type network_security_group_id: str
    :param applied_to: Resource ID of the NIC or subnet to which the network security group is
     applied.
:type applied_to: str
:param matched_rule: Matched network security rule.
:type matched_rule: ~azure.mgmt.network.v2019_09_01.models.MatchedRule
:ivar rules_evaluation_result: List of network security rules evaluation results.
:vartype rules_evaluation_result:
list[~azure.mgmt.network.v2019_09_01.models.NetworkSecurityRulesEvaluationResult]
"""
_validation = {
'rules_evaluation_result': {'readonly': True},
}
_attribute_map = {
'network_security_group_id': {'key': 'networkSecurityGroupId', 'type': 'str'},
'applied_to': {'key': 'appliedTo', 'type': 'str'},
'matched_rule': {'key': 'matchedRule', 'type': 'MatchedRule'},
'rules_evaluation_result': {'key': 'rulesEvaluationResult', 'type': '[NetworkSecurityRulesEvaluationResult]'},
}
def __init__(
self,
**kwargs
):
super(EvaluatedNetworkSecurityGroup, self).__init__(**kwargs)
self.network_security_group_id = kwargs.get('network_security_group_id', None)
self.applied_to = kwargs.get('applied_to', None)
self.matched_rule = kwargs.get('matched_rule', None)
self.rules_evaluation_result = None
class ExpressRouteCircuit(Resource):
"""ExpressRouteCircuit resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The SKU.
:type sku: ~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitSku
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param allow_classic_operations: Allow classic operations.
:type allow_classic_operations: bool
    :param circuit_provisioning_state: The CircuitProvisioningState of the resource.
:type circuit_provisioning_state: str
    :param service_provider_provisioning_state: The ServiceProviderProvisioningState of the
resource. Possible values include: "NotProvisioned", "Provisioning", "Provisioned",
"Deprovisioning".
:type service_provider_provisioning_state: str or
~azure.mgmt.network.v2019_09_01.models.ServiceProviderProvisioningState
:param authorizations: The list of authorizations.
:type authorizations:
list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitAuthorization]
:param peerings: The list of peerings.
:type peerings: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeering]
:param service_key: The ServiceKey.
:type service_key: str
:param service_provider_notes: The ServiceProviderNotes.
:type service_provider_notes: str
:param service_provider_properties: The ServiceProviderProperties.
:type service_provider_properties:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitServiceProviderProperties
:param express_route_port: The reference to the ExpressRoutePort resource when the circuit is
provisioned on an ExpressRoutePort resource.
:type express_route_port: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param bandwidth_in_gbps: The bandwidth of the circuit when the circuit is provisioned on an
ExpressRoutePort resource.
:type bandwidth_in_gbps: float
:ivar stag: The identifier of the circuit traffic. Outer tag for QinQ encapsulation.
:vartype stag: int
:ivar provisioning_state: The provisioning state of the express route circuit resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param gateway_manager_etag: The GatewayManager Etag.
:type gateway_manager_etag: str
:param global_reach_enabled: Flag denoting Global reach status.
:type global_reach_enabled: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'stag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'ExpressRouteCircuitSku'},
'etag': {'key': 'etag', 'type': 'str'},
'allow_classic_operations': {'key': 'properties.allowClassicOperations', 'type': 'bool'},
'circuit_provisioning_state': {'key': 'properties.circuitProvisioningState', 'type': 'str'},
'service_provider_provisioning_state': {'key': 'properties.serviceProviderProvisioningState', 'type': 'str'},
'authorizations': {'key': 'properties.authorizations', 'type': '[ExpressRouteCircuitAuthorization]'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'},
'service_key': {'key': 'properties.serviceKey', 'type': 'str'},
'service_provider_notes': {'key': 'properties.serviceProviderNotes', 'type': 'str'},
'service_provider_properties': {'key': 'properties.serviceProviderProperties', 'type': 'ExpressRouteCircuitServiceProviderProperties'},
'express_route_port': {'key': 'properties.expressRoutePort', 'type': 'SubResource'},
'bandwidth_in_gbps': {'key': 'properties.bandwidthInGbps', 'type': 'float'},
'stag': {'key': 'properties.stag', 'type': 'int'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'gateway_manager_etag': {'key': 'properties.gatewayManagerEtag', 'type': 'str'},
'global_reach_enabled': {'key': 'properties.globalReachEnabled', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuit, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = None
self.allow_classic_operations = kwargs.get('allow_classic_operations', None)
self.circuit_provisioning_state = kwargs.get('circuit_provisioning_state', None)
self.service_provider_provisioning_state = kwargs.get('service_provider_provisioning_state', None)
self.authorizations = kwargs.get('authorizations', None)
self.peerings = kwargs.get('peerings', None)
self.service_key = kwargs.get('service_key', None)
self.service_provider_notes = kwargs.get('service_provider_notes', None)
self.service_provider_properties = kwargs.get('service_provider_properties', None)
self.express_route_port = kwargs.get('express_route_port', None)
self.bandwidth_in_gbps = kwargs.get('bandwidth_in_gbps', None)
self.stag = None
self.provisioning_state = None
self.gateway_manager_etag = kwargs.get('gateway_manager_etag', None)
self.global_reach_enabled = kwargs.get('global_reach_enabled', None)
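
# Illustrative sketch (hypothetical values throughout): a provider-provisioned
# ExpressRoute circuit request body. ExpressRouteCircuitSku and
# ExpressRouteCircuitServiceProviderProperties are defined later in this
# module.
#
#   circuit = ExpressRouteCircuit(
#       location='westus2',
#       sku=ExpressRouteCircuitSku(
#           name='Standard_MeteredData', tier='Standard', family='MeteredData'),
#       service_provider_properties=ExpressRouteCircuitServiceProviderProperties(
#           service_provider_name='Some Provider',     # hypothetical provider
#           peering_location='Some Location',          # hypothetical location
#           bandwidth_in_mbps=200),
#   )
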
class ExpressRouteCircuitArpTable(msrest.serialization.Model):
"""The ARP table associated with the ExpressRouteCircuit.
:param age: Entry age in minutes.
:type age: int
:param interface: Interface address.
:type interface: str
:param ip_address: The IP address.
:type ip_address: str
:param mac_address: The MAC address.
:type mac_address: str
"""
_attribute_map = {
'age': {'key': 'age', 'type': 'int'},
'interface': {'key': 'interface', 'type': 'str'},
'ip_address': {'key': 'ipAddress', 'type': 'str'},
'mac_address': {'key': 'macAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitArpTable, self).__init__(**kwargs)
self.age = kwargs.get('age', None)
self.interface = kwargs.get('interface', None)
self.ip_address = kwargs.get('ip_address', None)
self.mac_address = kwargs.get('mac_address', None)
class ExpressRouteCircuitAuthorization(SubResource):
"""Authorization in an ExpressRouteCircuit resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param authorization_key: The authorization key.
:type authorization_key: str
:param authorization_use_status: The authorization use status. Possible values include:
"Available", "InUse".
:type authorization_use_status: str or
~azure.mgmt.network.v2019_09_01.models.AuthorizationUseStatus
:ivar provisioning_state: The provisioning state of the authorization resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'authorization_use_status': {'key': 'properties.authorizationUseStatus', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitAuthorization, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.authorization_key = kwargs.get('authorization_key', None)
self.authorization_use_status = kwargs.get('authorization_use_status', None)
self.provisioning_state = None
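
# Illustrative sketch: an authorization added to a circuit so that another
# subscription can link to it. Only the name (hypothetical here) is usually
# supplied by the caller; the authorization key is typically generated by the
# service and read back from the response.
#
#   auth = ExpressRouteCircuitAuthorization(name='customer-a')
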
class ExpressRouteCircuitConnection(SubResource):
"""Express Route Circuit Connection in an ExpressRouteCircuitPeering resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the circuit initiating connection.
:type express_route_circuit_peering: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param peer_express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the peered circuit.
:type peer_express_route_circuit_peering: ~azure.mgmt.network.v2019_09_01.models.SubResource
    :param address_prefix: /29 IP address space to carve out customer addresses for tunnels.
:type address_prefix: str
:param authorization_key: The authorization key.
:type authorization_key: str
:ivar circuit_connection_status: Express Route Circuit connection state. Possible values
include: "Connected", "Connecting", "Disconnected".
:vartype circuit_connection_status: str or
~azure.mgmt.network.v2019_09_01.models.CircuitConnectionStatus
:ivar provisioning_state: The provisioning state of the express route circuit connection
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'circuit_connection_status': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'express_route_circuit_peering': {'key': 'properties.expressRouteCircuitPeering', 'type': 'SubResource'},
'peer_express_route_circuit_peering': {'key': 'properties.peerExpressRouteCircuitPeering', 'type': 'SubResource'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'circuit_connection_status': {'key': 'properties.circuitConnectionStatus', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.express_route_circuit_peering = kwargs.get('express_route_circuit_peering', None)
self.peer_express_route_circuit_peering = kwargs.get('peer_express_route_circuit_peering', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.authorization_key = kwargs.get('authorization_key', None)
self.circuit_connection_status = None
self.provisioning_state = None
class ExpressRouteCircuitConnectionListResult(msrest.serialization.Model):
"""Response for ListConnections API service call retrieves all global reach connections that belongs to a Private Peering for an ExpressRouteCircuit.
:param value: The global reach connection associated with Private Peering in an ExpressRoute
Circuit.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitConnection]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitListResult(msrest.serialization.Model):
"""Response for ListExpressRouteCircuit API service call.
:param value: A list of ExpressRouteCircuits in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuit]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuit]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitPeering(SubResource):
"""Peering in an ExpressRouteCircuit resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param peering_type: The peering type. Possible values include: "AzurePublicPeering",
"AzurePrivatePeering", "MicrosoftPeering".
:type peering_type: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRoutePeeringType
:param state: The peering state. Possible values include: "Disabled", "Enabled".
:type state: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRoutePeeringState
:param azure_asn: The Azure ASN.
:type azure_asn: int
:param peer_asn: The peer ASN.
:type peer_asn: long
:param primary_peer_address_prefix: The primary address prefix.
:type primary_peer_address_prefix: str
:param secondary_peer_address_prefix: The secondary address prefix.
:type secondary_peer_address_prefix: str
:param primary_azure_port: The primary port.
:type primary_azure_port: str
:param secondary_azure_port: The secondary port.
:type secondary_azure_port: str
:param shared_key: The shared key.
:type shared_key: str
:param vlan_id: The VLAN ID.
:type vlan_id: int
:param microsoft_peering_config: The Microsoft peering configuration.
:type microsoft_peering_config:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeeringConfig
:param stats: The peering stats of express route circuit.
:type stats: ~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitStats
:ivar provisioning_state: The provisioning state of the express route circuit peering resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param gateway_manager_etag: The GatewayManager Etag.
:type gateway_manager_etag: str
    :ivar last_modified_by: Who last modified the peering.
:vartype last_modified_by: str
    :param route_filter: The reference to the RouteFilter resource.
:type route_filter: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param ipv6_peering_config: The IPv6 peering configuration.
:type ipv6_peering_config:
~azure.mgmt.network.v2019_09_01.models.Ipv6ExpressRouteCircuitPeeringConfig
:param express_route_connection: The ExpressRoute connection.
:type express_route_connection: ~azure.mgmt.network.v2019_09_01.models.ExpressRouteConnectionId
:param connections: The list of circuit connections associated with Azure Private Peering for
this circuit.
:type connections: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitConnection]
:ivar peered_connections: The list of peered circuit connections associated with Azure Private
Peering for this circuit.
:vartype peered_connections:
list[~azure.mgmt.network.v2019_09_01.models.PeerExpressRouteCircuitConnection]
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'peer_asn': {'maximum': 4294967295, 'minimum': 1},
'provisioning_state': {'readonly': True},
'last_modified_by': {'readonly': True},
'peered_connections': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'peering_type': {'key': 'properties.peeringType', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'azure_asn': {'key': 'properties.azureASN', 'type': 'int'},
'peer_asn': {'key': 'properties.peerASN', 'type': 'long'},
'primary_peer_address_prefix': {'key': 'properties.primaryPeerAddressPrefix', 'type': 'str'},
'secondary_peer_address_prefix': {'key': 'properties.secondaryPeerAddressPrefix', 'type': 'str'},
'primary_azure_port': {'key': 'properties.primaryAzurePort', 'type': 'str'},
'secondary_azure_port': {'key': 'properties.secondaryAzurePort', 'type': 'str'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'vlan_id': {'key': 'properties.vlanId', 'type': 'int'},
'microsoft_peering_config': {'key': 'properties.microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'},
'stats': {'key': 'properties.stats', 'type': 'ExpressRouteCircuitStats'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'gateway_manager_etag': {'key': 'properties.gatewayManagerEtag', 'type': 'str'},
'last_modified_by': {'key': 'properties.lastModifiedBy', 'type': 'str'},
'route_filter': {'key': 'properties.routeFilter', 'type': 'SubResource'},
'ipv6_peering_config': {'key': 'properties.ipv6PeeringConfig', 'type': 'Ipv6ExpressRouteCircuitPeeringConfig'},
'express_route_connection': {'key': 'properties.expressRouteConnection', 'type': 'ExpressRouteConnectionId'},
'connections': {'key': 'properties.connections', 'type': '[ExpressRouteCircuitConnection]'},
'peered_connections': {'key': 'properties.peeredConnections', 'type': '[PeerExpressRouteCircuitConnection]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeering, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.peering_type = kwargs.get('peering_type', None)
self.state = kwargs.get('state', None)
self.azure_asn = kwargs.get('azure_asn', None)
self.peer_asn = kwargs.get('peer_asn', None)
self.primary_peer_address_prefix = kwargs.get('primary_peer_address_prefix', None)
self.secondary_peer_address_prefix = kwargs.get('secondary_peer_address_prefix', None)
self.primary_azure_port = kwargs.get('primary_azure_port', None)
self.secondary_azure_port = kwargs.get('secondary_azure_port', None)
self.shared_key = kwargs.get('shared_key', None)
self.vlan_id = kwargs.get('vlan_id', None)
self.microsoft_peering_config = kwargs.get('microsoft_peering_config', None)
self.stats = kwargs.get('stats', None)
self.provisioning_state = None
self.gateway_manager_etag = kwargs.get('gateway_manager_etag', None)
self.last_modified_by = None
self.route_filter = kwargs.get('route_filter', None)
self.ipv6_peering_config = kwargs.get('ipv6_peering_config', None)
self.express_route_connection = kwargs.get('express_route_connection', None)
self.connections = kwargs.get('connections', None)
self.peered_connections = None
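
# Illustrative sketch: an Azure private peering definition for a circuit. The
# ASN, address prefixes, and VLAN ID are hypothetical; note the peer_asn
# validation above restricts it to the 1..4294967295 range.
#
#   peering = ExpressRouteCircuitPeering(
#       name='AzurePrivatePeering',
#       peering_type='AzurePrivatePeering',
#       peer_asn=65001,
#       primary_peer_address_prefix='192.168.1.0/30',
#       secondary_peer_address_prefix='192.168.2.0/30',
#       vlan_id=200,
#   )
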
class ExpressRouteCircuitPeeringConfig(msrest.serialization.Model):
"""Specifies the peering configuration.
Variables are only populated by the server, and will be ignored when sending a request.
    :param advertised_public_prefixes: The reference to AdvertisedPublicPrefixes.
:type advertised_public_prefixes: list[str]
    :param advertised_communities: The communities of BGP peering. Specified for Microsoft peering.
:type advertised_communities: list[str]
:ivar advertised_public_prefixes_state: The advertised public prefix state of the Peering
resource. Possible values include: "NotConfigured", "Configuring", "Configured",
"ValidationNeeded".
:vartype advertised_public_prefixes_state: str or
~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState
:param legacy_mode: The legacy mode of the peering.
:type legacy_mode: int
:param customer_asn: The CustomerASN of the peering.
:type customer_asn: int
:param routing_registry_name: The RoutingRegistryName of the configuration.
:type routing_registry_name: str
"""
_validation = {
'advertised_public_prefixes_state': {'readonly': True},
}
_attribute_map = {
'advertised_public_prefixes': {'key': 'advertisedPublicPrefixes', 'type': '[str]'},
'advertised_communities': {'key': 'advertisedCommunities', 'type': '[str]'},
'advertised_public_prefixes_state': {'key': 'advertisedPublicPrefixesState', 'type': 'str'},
'legacy_mode': {'key': 'legacyMode', 'type': 'int'},
'customer_asn': {'key': 'customerASN', 'type': 'int'},
'routing_registry_name': {'key': 'routingRegistryName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeeringConfig, self).__init__(**kwargs)
self.advertised_public_prefixes = kwargs.get('advertised_public_prefixes', None)
self.advertised_communities = kwargs.get('advertised_communities', None)
self.advertised_public_prefixes_state = None
self.legacy_mode = kwargs.get('legacy_mode', None)
self.customer_asn = kwargs.get('customer_asn', None)
self.routing_registry_name = kwargs.get('routing_registry_name', None)
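
# Illustrative sketch: a Microsoft peering configuration. The advertised
# prefix, customer ASN, and routing registry name are hypothetical
# placeholders for values the customer actually owns or has registered.
#
#   msft_config = ExpressRouteCircuitPeeringConfig(
#       advertised_public_prefixes=['203.0.113.0/30'],
#       customer_asn=65010,
#       routing_registry_name='ARIN',
#   )
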
class ExpressRouteCircuitPeeringId(msrest.serialization.Model):
"""ExpressRoute circuit peering identifier.
:param id: The ID of the ExpressRoute circuit peering.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeeringId, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class ExpressRouteCircuitPeeringListResult(msrest.serialization.Model):
"""Response for ListPeering API service call retrieves all peerings that belong to an ExpressRouteCircuit.
:param value: The peerings in an express route circuit.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeering]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitPeering]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitPeeringListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitReference(msrest.serialization.Model):
"""Reference to an express route circuit.
:param id: Corresponding Express Route Circuit Id.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitReference, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class ExpressRouteCircuitRoutesTable(msrest.serialization.Model):
"""The routes table associated with the ExpressRouteCircuit.
:param network: IP address of a network entity.
:type network: str
:param next_hop: NextHop address.
:type next_hop: str
:param loc_prf: Local preference value as set with the set local-preference route-map
configuration command.
:type loc_prf: str
:param weight: Route Weight.
:type weight: int
:param path: Autonomous system paths to the destination network.
:type path: str
"""
_attribute_map = {
'network': {'key': 'network', 'type': 'str'},
'next_hop': {'key': 'nextHop', 'type': 'str'},
'loc_prf': {'key': 'locPrf', 'type': 'str'},
'weight': {'key': 'weight', 'type': 'int'},
'path': {'key': 'path', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitRoutesTable, self).__init__(**kwargs)
self.network = kwargs.get('network', None)
self.next_hop = kwargs.get('next_hop', None)
self.loc_prf = kwargs.get('loc_prf', None)
self.weight = kwargs.get('weight', None)
self.path = kwargs.get('path', None)
class ExpressRouteCircuitRoutesTableSummary(msrest.serialization.Model):
"""The routes table associated with the ExpressRouteCircuit.
:param neighbor: IP address of the neighbor.
:type neighbor: str
:param v: BGP version number spoken to the neighbor.
:type v: int
:param as_property: Autonomous system number.
:type as_property: int
:param up_down: The length of time that the BGP session has been in the Established state, or
the current status if not in the Established state.
:type up_down: str
:param state_pfx_rcd: Current state of the BGP session, and the number of prefixes that have
been received from a neighbor or peer group.
:type state_pfx_rcd: str
"""
_attribute_map = {
'neighbor': {'key': 'neighbor', 'type': 'str'},
'v': {'key': 'v', 'type': 'int'},
'as_property': {'key': 'as', 'type': 'int'},
'up_down': {'key': 'upDown', 'type': 'str'},
'state_pfx_rcd': {'key': 'statePfxRcd', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitRoutesTableSummary, self).__init__(**kwargs)
self.neighbor = kwargs.get('neighbor', None)
self.v = kwargs.get('v', None)
self.as_property = kwargs.get('as_property', None)
self.up_down = kwargs.get('up_down', None)
self.state_pfx_rcd = kwargs.get('state_pfx_rcd', None)
class ExpressRouteCircuitsArpTableListResult(msrest.serialization.Model):
"""Response for ListArpTable associated with the Express Route Circuits API.
:param value: A list of the ARP tables.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitArpTable]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitArpTable]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitsArpTableListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitServiceProviderProperties(msrest.serialization.Model):
"""Contains ServiceProviderProperties in an ExpressRouteCircuit.
:param service_provider_name: The serviceProviderName.
:type service_provider_name: str
:param peering_location: The peering location.
:type peering_location: str
:param bandwidth_in_mbps: The BandwidthInMbps.
:type bandwidth_in_mbps: int
"""
_attribute_map = {
'service_provider_name': {'key': 'serviceProviderName', 'type': 'str'},
'peering_location': {'key': 'peeringLocation', 'type': 'str'},
'bandwidth_in_mbps': {'key': 'bandwidthInMbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitServiceProviderProperties, self).__init__(**kwargs)
self.service_provider_name = kwargs.get('service_provider_name', None)
self.peering_location = kwargs.get('peering_location', None)
self.bandwidth_in_mbps = kwargs.get('bandwidth_in_mbps', None)
class ExpressRouteCircuitSku(msrest.serialization.Model):
"""Contains SKU in an ExpressRouteCircuit.
:param name: The name of the SKU.
:type name: str
:param tier: The tier of the SKU. Possible values include: "Standard", "Premium", "Basic",
"Local".
:type tier: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitSkuTier
:param family: The family of the SKU. Possible values include: "UnlimitedData", "MeteredData".
:type family: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitSkuFamily
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
'family': {'key': 'family', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
self.family = kwargs.get('family', None)
class ExpressRouteCircuitsRoutesTableListResult(msrest.serialization.Model):
"""Response for ListRoutesTable associated with the Express Route Circuits API.
:param value: The list of routes table.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitRoutesTable]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitRoutesTable]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitsRoutesTableListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitsRoutesTableSummaryListResult(msrest.serialization.Model):
"""Response for ListRoutesTable associated with the Express Route Circuits API.
:param value: A list of the routes table.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitRoutesTableSummary]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitRoutesTableSummary]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitsRoutesTableSummaryListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteCircuitStats(msrest.serialization.Model):
"""Contains stats associated with the peering.
    :param primarybytes_in: The primary BytesIn of the peering.
:type primarybytes_in: long
:param primarybytes_out: The primary BytesOut of the peering.
:type primarybytes_out: long
:param secondarybytes_in: The secondary BytesIn of the peering.
:type secondarybytes_in: long
:param secondarybytes_out: The secondary BytesOut of the peering.
:type secondarybytes_out: long
"""
_attribute_map = {
'primarybytes_in': {'key': 'primarybytesIn', 'type': 'long'},
'primarybytes_out': {'key': 'primarybytesOut', 'type': 'long'},
'secondarybytes_in': {'key': 'secondarybytesIn', 'type': 'long'},
'secondarybytes_out': {'key': 'secondarybytesOut', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCircuitStats, self).__init__(**kwargs)
self.primarybytes_in = kwargs.get('primarybytes_in', None)
self.primarybytes_out = kwargs.get('primarybytes_out', None)
self.secondarybytes_in = kwargs.get('secondarybytes_in', None)
self.secondarybytes_out = kwargs.get('secondarybytes_out', None)
class ExpressRouteConnection(SubResource):
"""ExpressRouteConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param name: Required. The name of the resource.
:type name: str
:ivar provisioning_state: The provisioning state of the express route connection resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param express_route_circuit_peering: The ExpressRoute circuit peering.
:type express_route_circuit_peering:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeeringId
:param authorization_key: Authorization key to establish the connection.
:type authorization_key: str
:param routing_weight: The routing weight associated to the connection.
:type routing_weight: int
:param enable_internet_security: Enable internet security.
:type enable_internet_security: bool
"""
_validation = {
'name': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'express_route_circuit_peering': {'key': 'properties.expressRouteCircuitPeering', 'type': 'ExpressRouteCircuitPeeringId'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'enable_internet_security': {'key': 'properties.enableInternetSecurity', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteConnection, self).__init__(**kwargs)
self.name = kwargs['name']
self.provisioning_state = None
self.express_route_circuit_peering = kwargs.get('express_route_circuit_peering', None)
self.authorization_key = kwargs.get('authorization_key', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.enable_internet_security = kwargs.get('enable_internet_security', None)
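
# Illustrative sketch: unlike most models in this module, ``name`` is required
# here, so omitting it raises a KeyError from the constructor. The peering ID
# below is a hypothetical placeholder.
#
#   conn = ExpressRouteConnection(
#       name='er-connection-1',
#       express_route_circuit_peering=ExpressRouteCircuitPeeringId(
#           id='/subscriptions/<sub>/resourceGroups/<rg>/providers/'
#              'Microsoft.Network/expressRouteCircuits/<circuit>/peerings/AzurePrivatePeering'),
#       routing_weight=10,
#   )
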
class ExpressRouteConnectionId(msrest.serialization.Model):
"""The ID of the ExpressRouteConnection.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The ID of the ExpressRouteConnection.
:vartype id: str
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteConnectionId, self).__init__(**kwargs)
self.id = None
class ExpressRouteConnectionList(msrest.serialization.Model):
"""ExpressRouteConnection list.
:param value: The list of ExpressRoute connections.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteConnection]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteConnection]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteConnectionList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ExpressRouteCrossConnection(Resource):
"""ExpressRouteCrossConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar primary_azure_port: The name of the primary port.
:vartype primary_azure_port: str
:ivar secondary_azure_port: The name of the secondary port.
:vartype secondary_azure_port: str
:ivar s_tag: The identifier of the circuit traffic.
:vartype s_tag: int
:param peering_location: The peering location of the ExpressRoute circuit.
:type peering_location: str
:param bandwidth_in_mbps: The circuit bandwidth In Mbps.
:type bandwidth_in_mbps: int
:param express_route_circuit: The ExpressRouteCircuit.
:type express_route_circuit:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitReference
:param service_provider_provisioning_state: The provisioning state of the circuit in the
connectivity provider system. Possible values include: "NotProvisioned", "Provisioning",
"Provisioned", "Deprovisioning".
:type service_provider_provisioning_state: str or
~azure.mgmt.network.v2019_09_01.models.ServiceProviderProvisioningState
    :param service_provider_notes: Additional read-only notes set by the connectivity provider.
:type service_provider_notes: str
:ivar provisioning_state: The provisioning state of the express route cross connection
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param peerings: The list of peerings.
:type peerings: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCrossConnectionPeering]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'primary_azure_port': {'readonly': True},
'secondary_azure_port': {'readonly': True},
's_tag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'primary_azure_port': {'key': 'properties.primaryAzurePort', 'type': 'str'},
'secondary_azure_port': {'key': 'properties.secondaryAzurePort', 'type': 'str'},
's_tag': {'key': 'properties.sTag', 'type': 'int'},
'peering_location': {'key': 'properties.peeringLocation', 'type': 'str'},
'bandwidth_in_mbps': {'key': 'properties.bandwidthInMbps', 'type': 'int'},
'express_route_circuit': {'key': 'properties.expressRouteCircuit', 'type': 'ExpressRouteCircuitReference'},
'service_provider_provisioning_state': {'key': 'properties.serviceProviderProvisioningState', 'type': 'str'},
'service_provider_notes': {'key': 'properties.serviceProviderNotes', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCrossConnectionPeering]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnection, self).__init__(**kwargs)
self.etag = None
self.primary_azure_port = None
self.secondary_azure_port = None
self.s_tag = None
self.peering_location = kwargs.get('peering_location', None)
self.bandwidth_in_mbps = kwargs.get('bandwidth_in_mbps', None)
self.express_route_circuit = kwargs.get('express_route_circuit', None)
self.service_provider_provisioning_state = kwargs.get('service_provider_provisioning_state', None)
self.service_provider_notes = kwargs.get('service_provider_notes', None)
self.provisioning_state = None
self.peerings = kwargs.get('peerings', None)
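# Illustrative sketch (not part of the generated model set): building an
# ExpressRouteCrossConnection locally. Only writable parameters are passed as
# kwargs; fields marked readonly in ``_validation`` (``s_tag``,
# ``primary_azure_port``, ``provisioning_state``, ...) are populated by the
# server and start out as ``None``. All literal values below are hypothetical.
def _example_express_route_cross_connection():
    cross_connection = ExpressRouteCrossConnection(
        location='westus',  # hypothetical Azure region
        peering_location='Silicon Valley',  # hypothetical peering location
        bandwidth_in_mbps=1000,
    )
    # Server-populated fields are ignored when sending a request.
    assert cross_connection.primary_azure_port is None
    return cross_connection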
class ExpressRouteCrossConnectionListResult(msrest.serialization.Model):
"""Response for ListExpressRouteCrossConnection API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of ExpressRouteCrossConnection resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCrossConnection]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCrossConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ExpressRouteCrossConnectionPeering(SubResource):
"""Peering in an ExpressRoute Cross Connection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param peering_type: The peering type. Possible values include: "AzurePublicPeering",
"AzurePrivatePeering", "MicrosoftPeering".
:type peering_type: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRoutePeeringType
:param state: The peering state. Possible values include: "Disabled", "Enabled".
:type state: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRoutePeeringState
:ivar azure_asn: The Azure ASN.
:vartype azure_asn: int
:param peer_asn: The peer ASN.
:type peer_asn: long
:param primary_peer_address_prefix: The primary address prefix.
:type primary_peer_address_prefix: str
:param secondary_peer_address_prefix: The secondary address prefix.
:type secondary_peer_address_prefix: str
:ivar primary_azure_port: The primary port.
:vartype primary_azure_port: str
:ivar secondary_azure_port: The secondary port.
:vartype secondary_azure_port: str
:param shared_key: The shared key.
:type shared_key: str
:param vlan_id: The VLAN ID.
:type vlan_id: int
:param microsoft_peering_config: The Microsoft peering configuration.
:type microsoft_peering_config:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeeringConfig
:ivar provisioning_state: The provisioning state of the express route cross connection peering
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param gateway_manager_etag: The GatewayManager Etag.
:type gateway_manager_etag: str
:ivar last_modified_by: Who was the last to modify the peering.
:vartype last_modified_by: str
:param ipv6_peering_config: The IPv6 peering configuration.
:type ipv6_peering_config:
~azure.mgmt.network.v2019_09_01.models.Ipv6ExpressRouteCircuitPeeringConfig
"""
_validation = {
'etag': {'readonly': True},
'azure_asn': {'readonly': True},
'peer_asn': {'maximum': 4294967295, 'minimum': 1},
'primary_azure_port': {'readonly': True},
'secondary_azure_port': {'readonly': True},
'provisioning_state': {'readonly': True},
'last_modified_by': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'peering_type': {'key': 'properties.peeringType', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'azure_asn': {'key': 'properties.azureASN', 'type': 'int'},
'peer_asn': {'key': 'properties.peerASN', 'type': 'long'},
'primary_peer_address_prefix': {'key': 'properties.primaryPeerAddressPrefix', 'type': 'str'},
'secondary_peer_address_prefix': {'key': 'properties.secondaryPeerAddressPrefix', 'type': 'str'},
'primary_azure_port': {'key': 'properties.primaryAzurePort', 'type': 'str'},
'secondary_azure_port': {'key': 'properties.secondaryAzurePort', 'type': 'str'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'vlan_id': {'key': 'properties.vlanId', 'type': 'int'},
'microsoft_peering_config': {'key': 'properties.microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'gateway_manager_etag': {'key': 'properties.gatewayManagerEtag', 'type': 'str'},
'last_modified_by': {'key': 'properties.lastModifiedBy', 'type': 'str'},
'ipv6_peering_config': {'key': 'properties.ipv6PeeringConfig', 'type': 'Ipv6ExpressRouteCircuitPeeringConfig'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionPeering, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.peering_type = kwargs.get('peering_type', None)
self.state = kwargs.get('state', None)
self.azure_asn = None
self.peer_asn = kwargs.get('peer_asn', None)
self.primary_peer_address_prefix = kwargs.get('primary_peer_address_prefix', None)
self.secondary_peer_address_prefix = kwargs.get('secondary_peer_address_prefix', None)
self.primary_azure_port = None
self.secondary_azure_port = None
self.shared_key = kwargs.get('shared_key', None)
self.vlan_id = kwargs.get('vlan_id', None)
self.microsoft_peering_config = kwargs.get('microsoft_peering_config', None)
self.provisioning_state = None
self.gateway_manager_etag = kwargs.get('gateway_manager_etag', None)
self.last_modified_by = None
self.ipv6_peering_config = kwargs.get('ipv6_peering_config', None)
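# Illustrative sketch: a private peering on an ExpressRoute cross connection.
# ``peer_asn`` is constrained to 1..4294967295 by ``_validation`` (checked by
# msrest client-side validation when enabled, not at construction time). All
# values below are hypothetical.
def _example_cross_connection_peering():
    return ExpressRouteCrossConnectionPeering(
        name='AzurePrivatePeering',
        peering_type='AzurePrivatePeering',
        peer_asn=65010,
        primary_peer_address_prefix='192.168.16.252/30',
        secondary_peer_address_prefix='192.168.18.252/30',
        vlan_id=200,
    )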
class ExpressRouteCrossConnectionPeeringList(msrest.serialization.Model):
"""Response for ListPeering API service call retrieves all peerings that belong to an ExpressRouteCrossConnection.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: The peerings in an express route cross connection.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCrossConnectionPeering]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCrossConnectionPeering]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionPeeringList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ExpressRouteCrossConnectionRoutesTableSummary(msrest.serialization.Model):
"""The routes table associated with the ExpressRouteCircuit.
:param neighbor: IP address of Neighbor router.
:type neighbor: str
:param asn: Autonomous system number.
:type asn: int
:param up_down: The length of time that the BGP session has been in the Established state, or
the current status if not in the Established state.
:type up_down: str
:param state_or_prefixes_received: Current state of the BGP session, and the number of prefixes
that have been received from a neighbor or peer group.
:type state_or_prefixes_received: str
"""
_attribute_map = {
'neighbor': {'key': 'neighbor', 'type': 'str'},
'asn': {'key': 'asn', 'type': 'int'},
'up_down': {'key': 'upDown', 'type': 'str'},
'state_or_prefixes_received': {'key': 'stateOrPrefixesReceived', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionRoutesTableSummary, self).__init__(**kwargs)
self.neighbor = kwargs.get('neighbor', None)
self.asn = kwargs.get('asn', None)
self.up_down = kwargs.get('up_down', None)
self.state_or_prefixes_received = kwargs.get('state_or_prefixes_received', None)
class ExpressRouteCrossConnectionsRoutesTableSummaryListResult(msrest.serialization.Model):
"""Response for ListRoutesTable associated with the Express Route Cross Connections.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of the routes table.
:type value:
list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCrossConnectionRoutesTableSummary]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCrossConnectionRoutesTableSummary]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteCrossConnectionsRoutesTableSummaryListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ExpressRouteGateway(Resource):
"""ExpressRoute gateway resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param auto_scale_configuration: Configuration for auto scaling.
:type auto_scale_configuration:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteGatewayPropertiesAutoScaleConfiguration
:ivar express_route_connections: List of ExpressRoute connections to the ExpressRoute gateway.
:vartype express_route_connections:
list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteConnection]
:ivar provisioning_state: The provisioning state of the express route gateway resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param virtual_hub: The Virtual Hub where the ExpressRoute gateway is or will be deployed.
:type virtual_hub: ~azure.mgmt.network.v2019_09_01.models.VirtualHubId
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'express_route_connections': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'auto_scale_configuration': {'key': 'properties.autoScaleConfiguration', 'type': 'ExpressRouteGatewayPropertiesAutoScaleConfiguration'},
'express_route_connections': {'key': 'properties.expressRouteConnections', 'type': '[ExpressRouteConnection]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'virtual_hub': {'key': 'properties.virtualHub', 'type': 'VirtualHubId'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGateway, self).__init__(**kwargs)
self.etag = None
self.auto_scale_configuration = kwargs.get('auto_scale_configuration', None)
self.express_route_connections = None
self.provisioning_state = None
self.virtual_hub = kwargs.get('virtual_hub', None)
class ExpressRouteGatewayList(msrest.serialization.Model):
"""List of ExpressRoute gateways.
:param value: List of ExpressRoute gateways.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteGateway]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteGateway]'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGatewayList, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class ExpressRouteGatewayPropertiesAutoScaleConfiguration(msrest.serialization.Model):
"""Configuration for auto scaling.
:param bounds: Minimum and maximum number of scale units to deploy.
:type bounds:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds
"""
_attribute_map = {
'bounds': {'key': 'bounds', 'type': 'ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGatewayPropertiesAutoScaleConfiguration, self).__init__(**kwargs)
self.bounds = kwargs.get('bounds', None)
class ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds(msrest.serialization.Model):
"""Minimum and maximum number of scale units to deploy.
:param min: Minimum number of scale units deployed for ExpressRoute gateway.
:type min: int
:param max: Maximum number of scale units deployed for ExpressRoute gateway.
:type max: int
"""
_attribute_map = {
'min': {'key': 'min', 'type': 'int'},
'max': {'key': 'max', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds, self).__init__(**kwargs)
self.min = kwargs.get('min', None)
self.max = kwargs.get('max', None)
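# Illustrative sketch: an auto-scale configuration for an ExpressRoute gateway,
# nesting the bounds model inside the configuration model. The scale-unit
# numbers are hypothetical.
def _example_auto_scale_configuration():
    bounds = ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds(min=2, max=4)
    return ExpressRouteGatewayPropertiesAutoScaleConfiguration(bounds=bounds)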
class ExpressRouteLink(SubResource):
"""ExpressRouteLink child resource definition.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of child port resource that is unique among child port resources of the
parent.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar router_name: Name of Azure router associated with physical port.
:vartype router_name: str
:ivar interface_name: Name of Azure router interface.
:vartype interface_name: str
:ivar patch_panel_id: Mapping between physical port to patch panel port.
:vartype patch_panel_id: str
:ivar rack_id: Mapping of physical patch panel to rack.
:vartype rack_id: str
:ivar connector_type: Physical fiber port type. Possible values include: "LC", "SC".
:vartype connector_type: str or
~azure.mgmt.network.v2019_09_01.models.ExpressRouteLinkConnectorType
:param admin_state: Administrative state of the physical port. Possible values include:
"Enabled", "Disabled".
:type admin_state: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRouteLinkAdminState
:ivar provisioning_state: The provisioning state of the express route link resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param mac_sec_config: MacSec configuration.
:type mac_sec_config: ~azure.mgmt.network.v2019_09_01.models.ExpressRouteLinkMacSecConfig
"""
_validation = {
'etag': {'readonly': True},
'router_name': {'readonly': True},
'interface_name': {'readonly': True},
'patch_panel_id': {'readonly': True},
'rack_id': {'readonly': True},
'connector_type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'router_name': {'key': 'properties.routerName', 'type': 'str'},
'interface_name': {'key': 'properties.interfaceName', 'type': 'str'},
'patch_panel_id': {'key': 'properties.patchPanelId', 'type': 'str'},
'rack_id': {'key': 'properties.rackId', 'type': 'str'},
'connector_type': {'key': 'properties.connectorType', 'type': 'str'},
'admin_state': {'key': 'properties.adminState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'mac_sec_config': {'key': 'properties.macSecConfig', 'type': 'ExpressRouteLinkMacSecConfig'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteLink, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.router_name = None
self.interface_name = None
self.patch_panel_id = None
self.rack_id = None
self.connector_type = None
self.admin_state = kwargs.get('admin_state', None)
self.provisioning_state = None
self.mac_sec_config = kwargs.get('mac_sec_config', None)
class ExpressRouteLinkListResult(msrest.serialization.Model):
"""Response for ListExpressRouteLinks API service call.
:param value: The list of ExpressRouteLink sub-resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteLink]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteLinkListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteLinkMacSecConfig(msrest.serialization.Model):
"""ExpressRouteLink Mac Security Configuration.
:param ckn_secret_identifier: Keyvault Secret Identifier URL containing Mac security CKN key.
:type ckn_secret_identifier: str
:param cak_secret_identifier: Keyvault Secret Identifier URL containing Mac security CAK key.
:type cak_secret_identifier: str
:param cipher: Mac security cipher. Possible values include: "gcm-aes-128", "gcm-aes-256".
:type cipher: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRouteLinkMacSecCipher
"""
_attribute_map = {
'ckn_secret_identifier': {'key': 'cknSecretIdentifier', 'type': 'str'},
'cak_secret_identifier': {'key': 'cakSecretIdentifier', 'type': 'str'},
'cipher': {'key': 'cipher', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteLinkMacSecConfig, self).__init__(**kwargs)
self.ckn_secret_identifier = kwargs.get('ckn_secret_identifier', None)
self.cak_secret_identifier = kwargs.get('cak_secret_identifier', None)
self.cipher = kwargs.get('cipher', None)
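# Illustrative sketch: MACsec settings for an ExpressRouteLink. The Key Vault
# secret identifier URLs are hypothetical placeholders; ``cipher`` takes one of
# the documented values.
def _example_link_mac_sec_config():
    return ExpressRouteLinkMacSecConfig(
        ckn_secret_identifier='https://example-vault.vault.azure.net/secrets/ckn',  # hypothetical
        cak_secret_identifier='https://example-vault.vault.azure.net/secrets/cak',  # hypothetical
        cipher='gcm-aes-256',
    )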
class ExpressRoutePort(Resource):
"""ExpressRoutePort resource definition.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param identity: The identity of ExpressRoutePort, if configured.
:type identity: ~azure.mgmt.network.v2019_09_01.models.ManagedServiceIdentity
:param peering_location: The name of the peering location that the ExpressRoutePort is mapped
to physically.
:type peering_location: str
:param bandwidth_in_gbps: Bandwidth of procured ports in Gbps.
:type bandwidth_in_gbps: int
:ivar provisioned_bandwidth_in_gbps: Aggregate Gbps of associated circuit bandwidths.
:vartype provisioned_bandwidth_in_gbps: float
:ivar mtu: Maximum transmission unit of the physical port pair(s).
:vartype mtu: str
:param encapsulation: Encapsulation method on physical ports. Possible values include: "Dot1Q",
"QinQ".
:type encapsulation: str or
~azure.mgmt.network.v2019_09_01.models.ExpressRoutePortsEncapsulation
:ivar ether_type: Ether type of the physical port.
:vartype ether_type: str
:ivar allocation_date: Date of the physical port allocation to be used in Letter of
Authorization.
:vartype allocation_date: str
:param links: The set of physical links of the ExpressRoutePort resource.
:type links: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteLink]
:ivar circuits: Reference the ExpressRoute circuit(s) that are provisioned on this
ExpressRoutePort resource.
:vartype circuits: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the express route port resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:ivar resource_guid: The resource GUID property of the express route port resource.
:vartype resource_guid: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioned_bandwidth_in_gbps': {'readonly': True},
'mtu': {'readonly': True},
'ether_type': {'readonly': True},
'allocation_date': {'readonly': True},
'circuits': {'readonly': True},
'provisioning_state': {'readonly': True},
'resource_guid': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'peering_location': {'key': 'properties.peeringLocation', 'type': 'str'},
'bandwidth_in_gbps': {'key': 'properties.bandwidthInGbps', 'type': 'int'},
'provisioned_bandwidth_in_gbps': {'key': 'properties.provisionedBandwidthInGbps', 'type': 'float'},
'mtu': {'key': 'properties.mtu', 'type': 'str'},
'encapsulation': {'key': 'properties.encapsulation', 'type': 'str'},
'ether_type': {'key': 'properties.etherType', 'type': 'str'},
'allocation_date': {'key': 'properties.allocationDate', 'type': 'str'},
'links': {'key': 'properties.links', 'type': '[ExpressRouteLink]'},
'circuits': {'key': 'properties.circuits', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePort, self).__init__(**kwargs)
self.etag = None
self.identity = kwargs.get('identity', None)
self.peering_location = kwargs.get('peering_location', None)
self.bandwidth_in_gbps = kwargs.get('bandwidth_in_gbps', None)
self.provisioned_bandwidth_in_gbps = None
self.mtu = None
self.encapsulation = kwargs.get('encapsulation', None)
self.ether_type = None
self.allocation_date = None
self.links = kwargs.get('links', None)
self.circuits = None
self.provisioning_state = None
self.resource_guid = None
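# Illustrative sketch: an ExpressRoutePort definition with one physical link.
# Only writable fields are set; aggregates such as ``mtu``, ``ether_type`` and
# ``circuits`` are server-populated. All values are hypothetical.
def _example_express_route_port():
    return ExpressRoutePort(
        location='westus2',  # hypothetical Azure region
        peering_location='Equinix-Seattle-SE2',  # hypothetical peering location name
        bandwidth_in_gbps=10,
        encapsulation='QinQ',
        links=[ExpressRouteLink(name='link1', admin_state='Enabled')],
    )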
class ExpressRoutePortListResult(msrest.serialization.Model):
"""Response for ListExpressRoutePorts API service call.
:param value: A list of ExpressRoutePort resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRoutePort]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRoutePort]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRoutePortsLocation(Resource):
"""Definition of the ExpressRoutePorts peering location resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar address: Address of peering location.
:vartype address: str
:ivar contact: Contact details of peering locations.
:vartype contact: str
:param available_bandwidths: The inventory of available ExpressRoutePort bandwidths.
:type available_bandwidths:
list[~azure.mgmt.network.v2019_09_01.models.ExpressRoutePortsLocationBandwidths]
:ivar provisioning_state: The provisioning state of the express route port location resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'address': {'readonly': True},
'contact': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'address': {'key': 'properties.address', 'type': 'str'},
'contact': {'key': 'properties.contact', 'type': 'str'},
'available_bandwidths': {'key': 'properties.availableBandwidths', 'type': '[ExpressRoutePortsLocationBandwidths]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortsLocation, self).__init__(**kwargs)
self.address = None
self.contact = None
self.available_bandwidths = kwargs.get('available_bandwidths', None)
self.provisioning_state = None
class ExpressRoutePortsLocationBandwidths(msrest.serialization.Model):
"""Real-time inventory of available ExpressRoute port bandwidths.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar offer_name: Bandwidth descriptive name.
:vartype offer_name: str
:ivar value_in_gbps: Bandwidth value in Gbps.
:vartype value_in_gbps: int
"""
_validation = {
'offer_name': {'readonly': True},
'value_in_gbps': {'readonly': True},
}
_attribute_map = {
'offer_name': {'key': 'offerName', 'type': 'str'},
'value_in_gbps': {'key': 'valueInGbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortsLocationBandwidths, self).__init__(**kwargs)
self.offer_name = None
self.value_in_gbps = None
class ExpressRoutePortsLocationListResult(msrest.serialization.Model):
"""Response for ListExpressRoutePortsLocations API service call.
:param value: The list of all ExpressRoutePort peering locations.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRoutePortsLocation]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRoutePortsLocation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRoutePortsLocationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ExpressRouteServiceProvider(Resource):
"""A ExpressRouteResourceProvider object.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param peering_locations: A list of peering locations.
:type peering_locations: list[str]
:param bandwidths_offered: A list of bandwidths offered.
:type bandwidths_offered:
list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteServiceProviderBandwidthsOffered]
:ivar provisioning_state: The provisioning state of the express route service provider
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'peering_locations': {'key': 'properties.peeringLocations', 'type': '[str]'},
'bandwidths_offered': {'key': 'properties.bandwidthsOffered', 'type': '[ExpressRouteServiceProviderBandwidthsOffered]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteServiceProvider, self).__init__(**kwargs)
self.peering_locations = kwargs.get('peering_locations', None)
self.bandwidths_offered = kwargs.get('bandwidths_offered', None)
self.provisioning_state = None
class ExpressRouteServiceProviderBandwidthsOffered(msrest.serialization.Model):
"""Contains bandwidths offered in ExpressRouteServiceProvider resources.
:param offer_name: The OfferName.
:type offer_name: str
:param value_in_mbps: The ValueInMbps.
:type value_in_mbps: int
"""
_attribute_map = {
'offer_name': {'key': 'offerName', 'type': 'str'},
'value_in_mbps': {'key': 'valueInMbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteServiceProviderBandwidthsOffered, self).__init__(**kwargs)
self.offer_name = kwargs.get('offer_name', None)
self.value_in_mbps = kwargs.get('value_in_mbps', None)
class ExpressRouteServiceProviderListResult(msrest.serialization.Model):
"""Response for the ListExpressRouteServiceProvider API service call.
:param value: A list of ExpressRouteResourceProvider resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteServiceProvider]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteServiceProvider]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ExpressRouteServiceProviderListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class FirewallPolicy(Resource):
"""FirewallPolicy Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar rule_groups: List of references to FirewallPolicyRuleGroups.
:vartype rule_groups: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the firewall policy resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param base_policy: The parent firewall policy from which rules are inherited.
:type base_policy: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar firewalls: List of references to Azure Firewalls that this Firewall Policy is associated
with.
:vartype firewalls: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar child_policies: List of references to Child Firewall Policies.
:vartype child_policies: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param threat_intel_mode: The operation mode for Threat Intelligence. Possible values include:
"Alert", "Deny", "Off".
:type threat_intel_mode: str or
~azure.mgmt.network.v2019_09_01.models.AzureFirewallThreatIntelMode
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'rule_groups': {'readonly': True},
'provisioning_state': {'readonly': True},
'firewalls': {'readonly': True},
'child_policies': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'rule_groups': {'key': 'properties.ruleGroups', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'base_policy': {'key': 'properties.basePolicy', 'type': 'SubResource'},
'firewalls': {'key': 'properties.firewalls', 'type': '[SubResource]'},
'child_policies': {'key': 'properties.childPolicies', 'type': '[SubResource]'},
'threat_intel_mode': {'key': 'properties.threatIntelMode', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicy, self).__init__(**kwargs)
self.etag = None
self.rule_groups = None
self.provisioning_state = None
self.base_policy = kwargs.get('base_policy', None)
self.firewalls = None
self.child_policies = None
self.threat_intel_mode = kwargs.get('threat_intel_mode', None)
class FirewallPolicyRule(msrest.serialization.Model):
"""Properties of the rule.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: FirewallPolicyFilterRule, FirewallPolicyNatRule.
All required parameters must be populated in order to send to Azure.
    :param rule_type: Required. The type of the rule. Constant filled by server. Possible values
include: "FirewallPolicyNatRule", "FirewallPolicyFilterRule".
:type rule_type: str or ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleType
:param name: The name of the rule.
:type name: str
:param priority: Priority of the Firewall Policy Rule resource.
:type priority: int
"""
_validation = {
'rule_type': {'required': True},
'priority': {'maximum': 65000, 'minimum': 100},
}
_attribute_map = {
'rule_type': {'key': 'ruleType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
}
_subtype_map = {
'rule_type': {'FirewallPolicyFilterRule': 'FirewallPolicyFilterRule', 'FirewallPolicyNatRule': 'FirewallPolicyNatRule'}
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyRule, self).__init__(**kwargs)
self.rule_type = None # type: Optional[str]
self.name = kwargs.get('name', None)
self.priority = kwargs.get('priority', None)
class FirewallPolicyFilterRule(FirewallPolicyRule):
"""Firewall Policy Filter Rule.
All required parameters must be populated in order to send to Azure.
    :param rule_type: Required. The type of the rule. Constant filled by server. Possible values
include: "FirewallPolicyNatRule", "FirewallPolicyFilterRule".
:type rule_type: str or ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleType
:param name: The name of the rule.
:type name: str
:param priority: Priority of the Firewall Policy Rule resource.
:type priority: int
:param action: The action type of a Filter rule.
:type action: ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyFilterRuleAction
:param rule_conditions: Collection of rule conditions used by a rule.
:type rule_conditions: list[~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleCondition]
"""
_validation = {
'rule_type': {'required': True},
'priority': {'maximum': 65000, 'minimum': 100},
}
_attribute_map = {
'rule_type': {'key': 'ruleType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'action': {'key': 'action', 'type': 'FirewallPolicyFilterRuleAction'},
'rule_conditions': {'key': 'ruleConditions', 'type': '[FirewallPolicyRuleCondition]'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyFilterRule, self).__init__(**kwargs)
self.rule_type = 'FirewallPolicyFilterRule' # type: str
self.action = kwargs.get('action', None)
self.rule_conditions = kwargs.get('rule_conditions', None)
class FirewallPolicyFilterRuleAction(msrest.serialization.Model):
"""Properties of the FirewallPolicyFilterRuleAction.
:param type: The type of action. Possible values include: "Allow", "Deny".
:type type: str or ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyFilterRuleActionType
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyFilterRuleAction, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
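# Illustrative sketch: a firewall policy filter rule with an explicit action.
# The subclass pins the ``rule_type`` discriminator to 'FirewallPolicyFilterRule',
# which is the value ``_subtype_map`` on FirewallPolicyRule uses to resolve
# polymorphic payloads. Name, priority and conditions are hypothetical.
def _example_firewall_policy_filter_rule():
    rule = FirewallPolicyFilterRule(
        name='allow-web',
        priority=200,
        action=FirewallPolicyFilterRuleAction(type='Allow'),
        rule_conditions=[],
    )
    assert rule.rule_type == 'FirewallPolicyFilterRule'
    return rule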
class FirewallPolicyListResult(msrest.serialization.Model):
"""Response for ListFirewallPolicies API service call.
:param value: List of Firewall Policies in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.FirewallPolicy]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[FirewallPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class FirewallPolicyNatRule(FirewallPolicyRule):
"""Firewall Policy NAT Rule.
All required parameters must be populated in order to send to Azure.
    :param rule_type: Required. The type of the rule. Constant filled by server. Possible values
include: "FirewallPolicyNatRule", "FirewallPolicyFilterRule".
:type rule_type: str or ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleType
:param name: The name of the rule.
:type name: str
:param priority: Priority of the Firewall Policy Rule resource.
:type priority: int
:param action: The action type of a Nat rule.
:type action: ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyNatRuleAction
:param translated_address: The translated address for this NAT rule.
:type translated_address: str
:param translated_port: The translated port for this NAT rule.
:type translated_port: str
:param rule_condition: The match conditions for incoming traffic.
:type rule_condition: ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleCondition
"""
_validation = {
'rule_type': {'required': True},
'priority': {'maximum': 65000, 'minimum': 100},
}
_attribute_map = {
'rule_type': {'key': 'ruleType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'action': {'key': 'action', 'type': 'FirewallPolicyNatRuleAction'},
'translated_address': {'key': 'translatedAddress', 'type': 'str'},
'translated_port': {'key': 'translatedPort', 'type': 'str'},
'rule_condition': {'key': 'ruleCondition', 'type': 'FirewallPolicyRuleCondition'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyNatRule, self).__init__(**kwargs)
self.rule_type = 'FirewallPolicyNatRule' # type: str
self.action = kwargs.get('action', None)
self.translated_address = kwargs.get('translated_address', None)
self.translated_port = kwargs.get('translated_port', None)
self.rule_condition = kwargs.get('rule_condition', None)
class FirewallPolicyNatRuleAction(msrest.serialization.Model):
"""Properties of the FirewallPolicyNatRuleAction.
:param type: The type of action. Possible values include: "DNAT".
:type type: str or ~azure.mgmt.network.v2019_09_01.models.FirewallPolicyNatRuleActionType
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyNatRuleAction, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
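# Illustrative sketch: a DNAT rule that rewrites the destination address and
# port. The NAT-specific action model is used for ``action``; addresses and
# ports are hypothetical, and ``rule_condition`` is left unset here.
def _example_firewall_policy_nat_rule():
    return FirewallPolicyNatRule(
        name='dnat-web',
        priority=100,
        action=FirewallPolicyNatRuleAction(type='DNAT'),
        translated_address='10.0.0.4',
        translated_port='8080',
    )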
class FirewallPolicyRuleConditionApplicationProtocol(msrest.serialization.Model):
"""Properties of the application rule protocol.
:param protocol_type: Protocol type. Possible values include: "Http", "Https".
:type protocol_type: str or
~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleConditionApplicationProtocolType
:param port: Port number for the protocol, cannot be greater than 64000.
:type port: int
"""
_validation = {
'port': {'maximum': 64000, 'minimum': 0},
}
_attribute_map = {
'protocol_type': {'key': 'protocolType', 'type': 'str'},
'port': {'key': 'port', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyRuleConditionApplicationProtocol, self).__init__(**kwargs)
self.protocol_type = kwargs.get('protocol_type', None)
self.port = kwargs.get('port', None)
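# Illustrative sketch: an application rule protocol entry. ``port`` is bounded
# to 0..64000 by ``_validation``; 443 for HTTPS is a hypothetical but typical
# choice.
def _example_application_rule_protocol():
    return FirewallPolicyRuleConditionApplicationProtocol(
        protocol_type='Https',
        port=443,
    )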
class FirewallPolicyRuleGroup(SubResource):
"""Rule Group resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Rule Group type.
:vartype type: str
:param priority: Priority of the Firewall Policy Rule Group resource.
:type priority: int
:param rules: Group of Firewall Policy rules.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRule]
:ivar provisioning_state: The provisioning state of the firewall policy rule group resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'priority': {'maximum': 65000, 'minimum': 100},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'rules': {'key': 'properties.rules', 'type': '[FirewallPolicyRule]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyRuleGroup, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.priority = kwargs.get('priority', None)
self.rules = kwargs.get('rules', None)
self.provisioning_state = None
class FirewallPolicyRuleGroupListResult(msrest.serialization.Model):
"""Response for ListFirewallPolicyRuleGroups API service call.
:param value: List of FirewallPolicyRuleGroups in a FirewallPolicy.
:type value: list[~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleGroup]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[FirewallPolicyRuleGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FirewallPolicyRuleGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class FlowLogFormatParameters(msrest.serialization.Model):
"""Parameters that define the flow log format.
:param type: The file type of flow log. Possible values include: "JSON".
:type type: str or ~azure.mgmt.network.v2019_09_01.models.FlowLogFormatType
:param version: The version (revision) of the flow log.
:type version: int
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(FlowLogFormatParameters, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', 0)
class FlowLogInformation(msrest.serialization.Model):
"""Information on the configuration of flow log and traffic analytics (optional) .
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The ID of the resource to configure for flow log and
     traffic analytics (optional).
:type target_resource_id: str
:param flow_analytics_configuration: Parameters that define the configuration of traffic
analytics.
:type flow_analytics_configuration:
~azure.mgmt.network.v2019_09_01.models.TrafficAnalyticsProperties
:param storage_id: Required. ID of the storage account which is used to store the flow log.
:type storage_id: str
:param enabled: Required. Flag to enable/disable flow logging.
:type enabled: bool
:param retention_policy: Parameters that define the retention policy for flow log.
:type retention_policy: ~azure.mgmt.network.v2019_09_01.models.RetentionPolicyParameters
:param format: Parameters that define the flow log format.
:type format: ~azure.mgmt.network.v2019_09_01.models.FlowLogFormatParameters
"""
_validation = {
'target_resource_id': {'required': True},
'storage_id': {'required': True},
'enabled': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'flow_analytics_configuration': {'key': 'flowAnalyticsConfiguration', 'type': 'TrafficAnalyticsProperties'},
'storage_id': {'key': 'properties.storageId', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'retention_policy': {'key': 'properties.retentionPolicy', 'type': 'RetentionPolicyParameters'},
'format': {'key': 'properties.format', 'type': 'FlowLogFormatParameters'},
}
def __init__(
self,
**kwargs
):
super(FlowLogInformation, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.flow_analytics_configuration = kwargs.get('flow_analytics_configuration', None)
self.storage_id = kwargs['storage_id']
self.enabled = kwargs['enabled']
self.retention_policy = kwargs.get('retention_policy', None)
self.format = kwargs.get('format', None)
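# Illustrative sketch: enabling flow logging on a target resource.
# ``target_resource_id``, ``storage_id`` and ``enabled`` are required and read
# with ``kwargs[...]`` above, so omitting any of them raises KeyError at
# construction time. The resource IDs are hypothetical placeholders.
def _example_flow_log_information():
    return FlowLogInformation(
        target_resource_id='/subscriptions/.../networkSecurityGroups/example-nsg',  # hypothetical ID
        storage_id='/subscriptions/.../storageAccounts/examplelogs',  # hypothetical ID
        enabled=True,
        format=FlowLogFormatParameters(type='JSON', version=2),
    )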
class FlowLogStatusParameters(msrest.serialization.Model):
"""Parameters that define a resource to query flow log and traffic analytics (optional) status.
All required parameters must be populated in order to send to Azure.
    :param target_resource_id: Required. The target resource from which to get the flow log and
     traffic analytics (optional) status.
:type target_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FlowLogStatusParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
class FrontendIPConfiguration(SubResource):
"""Frontend IP address of the load balancer.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of frontend IP
configurations used by the load balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param zones: A list of availability zones denoting the IP allocated for the resource needs to
come from.
:type zones: list[str]
:ivar inbound_nat_rules: An array of references to inbound rules that use this frontend IP.
:vartype inbound_nat_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar inbound_nat_pools: An array of references to inbound pools that use this frontend IP.
:vartype inbound_nat_pools: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar outbound_rules: An array of references to outbound rules that use this frontend IP.
:vartype outbound_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar load_balancing_rules: An array of references to load balancing rules that use this
frontend IP.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The Private IP allocation method. Possible values include:
"Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
:param private_ip_address_version: Whether the specific ipconfiguration is IPv4 or IPv6.
Default is taken as IPv4. Possible values include: "IPv4", "IPv6".
:type private_ip_address_version: str or ~azure.mgmt.network.v2019_09_01.models.IPVersion
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.Subnet
:param public_ip_address: The reference of the Public IP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_09_01.models.PublicIPAddress
:param public_ip_prefix: The reference of the Public IP Prefix resource.
:type public_ip_prefix: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the frontend IP configuration resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'inbound_nat_rules': {'readonly': True},
'inbound_nat_pools': {'readonly': True},
'outbound_rules': {'readonly': True},
'load_balancing_rules': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'inbound_nat_rules': {'key': 'properties.inboundNatRules', 'type': '[SubResource]'},
'inbound_nat_pools': {'key': 'properties.inboundNatPools', 'type': '[SubResource]'},
'outbound_rules': {'key': 'properties.outboundRules', 'type': '[SubResource]'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'private_ip_address_version': {'key': 'properties.privateIPAddressVersion', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'public_ip_prefix': {'key': 'properties.publicIPPrefix', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(FrontendIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.zones = kwargs.get('zones', None)
self.inbound_nat_rules = None
self.inbound_nat_pools = None
self.outbound_rules = None
self.load_balancing_rules = None
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.private_ip_address_version = kwargs.get('private_ip_address_version', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.public_ip_prefix = kwargs.get('public_ip_prefix', None)
self.provisioning_state = None
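# Illustrative sketch: a zonal frontend IP configuration with a statically
# allocated private address. The rule references (inbound NAT rules, outbound
# rules, load balancing rules) are readonly and filled in by the server. Name,
# zone and address are hypothetical.
def _example_frontend_ip_configuration():
    return FrontendIPConfiguration(
        name='frontend-1',
        zones=['1'],
        private_ip_allocation_method='Static',
        private_ip_address='10.0.1.10',
    )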
class GatewayRoute(msrest.serialization.Model):
"""Gateway routing details.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar local_address: The gateway's local address.
:vartype local_address: str
:ivar network: The route's network prefix.
:vartype network: str
:ivar next_hop: The route's next hop.
:vartype next_hop: str
:ivar source_peer: The peer this route was learned from.
:vartype source_peer: str
:ivar origin: The source this route was learned from.
:vartype origin: str
:ivar as_path: The route's AS path sequence.
:vartype as_path: str
:ivar weight: The route's weight.
:vartype weight: int
"""
_validation = {
'local_address': {'readonly': True},
'network': {'readonly': True},
'next_hop': {'readonly': True},
'source_peer': {'readonly': True},
'origin': {'readonly': True},
'as_path': {'readonly': True},
'weight': {'readonly': True},
}
_attribute_map = {
'local_address': {'key': 'localAddress', 'type': 'str'},
'network': {'key': 'network', 'type': 'str'},
'next_hop': {'key': 'nextHop', 'type': 'str'},
'source_peer': {'key': 'sourcePeer', 'type': 'str'},
'origin': {'key': 'origin', 'type': 'str'},
'as_path': {'key': 'asPath', 'type': 'str'},
'weight': {'key': 'weight', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(GatewayRoute, self).__init__(**kwargs)
self.local_address = None
self.network = None
self.next_hop = None
self.source_peer = None
self.origin = None
self.as_path = None
self.weight = None
class GatewayRouteListResult(msrest.serialization.Model):
"""List of virtual network gateway routes.
:param value: List of gateway routes.
:type value: list[~azure.mgmt.network.v2019_09_01.models.GatewayRoute]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[GatewayRoute]'},
}
def __init__(
self,
**kwargs
):
super(GatewayRouteListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class GetVpnSitesConfigurationRequest(msrest.serialization.Model):
"""List of Vpn-Sites.
All required parameters must be populated in order to send to Azure.
:param vpn_sites: List of resource-ids of the vpn-sites for which config is to be downloaded.
:type vpn_sites: list[str]
    :param output_blob_sas_url: Required. The SAS URL to download the configurations for vpn-sites.
:type output_blob_sas_url: str
"""
_validation = {
'output_blob_sas_url': {'required': True},
}
_attribute_map = {
'vpn_sites': {'key': 'vpnSites', 'type': '[str]'},
'output_blob_sas_url': {'key': 'outputBlobSasUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(GetVpnSitesConfigurationRequest, self).__init__(**kwargs)
self.vpn_sites = kwargs.get('vpn_sites', None)
self.output_blob_sas_url = kwargs['output_blob_sas_url']
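# Illustrative sketch: asking the service to write vpn-site configurations to a
# storage blob. ``output_blob_sas_url`` is required (read with ``kwargs[...]``
# above) while ``vpn_sites`` is optional. The resource ID and SAS URL are
# hypothetical placeholders.
def _example_get_vpn_sites_configuration_request():
    return GetVpnSitesConfigurationRequest(
        vpn_sites=['/subscriptions/.../vpnSites/example-site'],  # hypothetical resource ID
        output_blob_sas_url='https://examplestorage.blob.core.windows.net/configs/sites.json?sv=...',  # hypothetical SAS URL
    )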
class HTTPConfiguration(msrest.serialization.Model):
"""HTTP configuration of the connectivity check.
:param method: HTTP method. Possible values include: "Get".
:type method: str or ~azure.mgmt.network.v2019_09_01.models.HTTPMethod
:param headers: List of HTTP headers.
:type headers: list[~azure.mgmt.network.v2019_09_01.models.HTTPHeader]
:param valid_status_codes: Valid status codes.
:type valid_status_codes: list[int]
"""
_attribute_map = {
'method': {'key': 'method', 'type': 'str'},
'headers': {'key': 'headers', 'type': '[HTTPHeader]'},
'valid_status_codes': {'key': 'validStatusCodes', 'type': '[int]'},
}
def __init__(
self,
**kwargs
):
super(HTTPConfiguration, self).__init__(**kwargs)
self.method = kwargs.get('method', None)
self.headers = kwargs.get('headers', None)
self.valid_status_codes = kwargs.get('valid_status_codes', None)
class HTTPHeader(msrest.serialization.Model):
"""Describes the HTTP header.
:param name: The name in HTTP header.
:type name: str
:param value: The value in HTTP header.
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(HTTPHeader, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.value = kwargs.get('value', None)
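# Illustrative sketch: HTTP settings for a connectivity check, pairing
# HTTPConfiguration with HTTPHeader entries. The header and status codes are
# hypothetical.
def _example_http_configuration():
    return HTTPConfiguration(
        method='Get',
        headers=[HTTPHeader(name='Host', value='www.example.com')],
        valid_status_codes=[200, 301, 302],
    )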
class HubIPAddresses(msrest.serialization.Model):
"""IP addresses associated with azure firewall.
:param public_ip_addresses: List of Public IP addresses associated with azure firewall.
:type public_ip_addresses:
list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallPublicIPAddress]
    :param private_ip_address: Private IP address associated with Azure Firewall.
:type private_ip_address: str
"""
_attribute_map = {
'public_ip_addresses': {'key': 'publicIPAddresses', 'type': '[AzureFirewallPublicIPAddress]'},
'private_ip_address': {'key': 'privateIPAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(HubIPAddresses, self).__init__(**kwargs)
self.public_ip_addresses = kwargs.get('public_ip_addresses', None)
self.private_ip_address = kwargs.get('private_ip_address', None)
class HubVirtualNetworkConnection(SubResource):
"""HubVirtualNetworkConnection Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param remote_virtual_network: Reference to the remote virtual network.
:type remote_virtual_network: ~azure.mgmt.network.v2019_09_01.models.SubResource
    :param allow_hub_to_remote_vnet_transit: Whether VirtualHub to RemoteVnet transit is enabled or not.
:type allow_hub_to_remote_vnet_transit: bool
:param allow_remote_vnet_to_use_hub_vnet_gateways: Allow RemoteVnet to use Virtual Hub's
gateways.
:type allow_remote_vnet_to_use_hub_vnet_gateways: bool
:param enable_internet_security: Enable internet security.
:type enable_internet_security: bool
:ivar provisioning_state: The provisioning state of the hub virtual network connection
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'allow_hub_to_remote_vnet_transit': {'key': 'properties.allowHubToRemoteVnetTransit', 'type': 'bool'},
'allow_remote_vnet_to_use_hub_vnet_gateways': {'key': 'properties.allowRemoteVnetToUseHubVnetGateways', 'type': 'bool'},
'enable_internet_security': {'key': 'properties.enableInternetSecurity', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(HubVirtualNetworkConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.remote_virtual_network = kwargs.get('remote_virtual_network', None)
self.allow_hub_to_remote_vnet_transit = kwargs.get('allow_hub_to_remote_vnet_transit', None)
self.allow_remote_vnet_to_use_hub_vnet_gateways = kwargs.get('allow_remote_vnet_to_use_hub_vnet_gateways', None)
self.enable_internet_security = kwargs.get('enable_internet_security', None)
self.provisioning_state = None
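# Illustrative sketch (not part of the generated API surface): a hub virtual
# network connection usually only needs a reference to the remote virtual
# network plus the transit/gateway flags. The resource ID below is a
# hypothetical placeholder.
def _example_hub_virtual_network_connection():
    """Build a sample HubVirtualNetworkConnection to a remote VNet."""
    remote_vnet = SubResource(
        id='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
           'Microsoft.Network/virtualNetworks/<vnet-name>')
    return HubVirtualNetworkConnection(
        name='hub-to-spoke1',
        remote_virtual_network=remote_vnet,
        allow_hub_to_remote_vnet_transit=True,
        allow_remote_vnet_to_use_hub_vnet_gateways=True,
        enable_internet_security=False,
    )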
class InboundNatPool(SubResource):
"""Inbound NAT pool of the load balancer.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of inbound NAT pools used
by the load balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param frontend_ip_configuration: A reference to frontend IP addresses.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param protocol: The reference to the transport protocol used by the inbound NAT pool. Possible
values include: "Udp", "Tcp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.TransportProtocol
:param frontend_port_range_start: The first port number in the range of external ports that
will be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values
range between 1 and 65534.
:type frontend_port_range_start: int
:param frontend_port_range_end: The last port number in the range of external ports that will
be used to provide Inbound Nat to NICs associated with a load balancer. Acceptable values range
between 1 and 65535.
:type frontend_port_range_end: int
:param backend_port: The port used for internal connections on the endpoint. Acceptable values
are between 1 and 65535.
:type backend_port: int
:param idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
protocol is set to TCP.
:type idle_timeout_in_minutes: int
:param enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
capability required to configure a SQL AlwaysOn Availability Group. This setting is required
when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
after you create the endpoint.
:type enable_floating_ip: bool
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:ivar provisioning_state: The provisioning state of the inbound NAT pool resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'frontend_port_range_start': {'key': 'properties.frontendPortRangeStart', 'type': 'int'},
'frontend_port_range_end': {'key': 'properties.frontendPortRangeEnd', 'type': 'int'},
'backend_port': {'key': 'properties.backendPort', 'type': 'int'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'enable_floating_ip': {'key': 'properties.enableFloatingIP', 'type': 'bool'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(InboundNatPool, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.protocol = kwargs.get('protocol', None)
self.frontend_port_range_start = kwargs.get('frontend_port_range_start', None)
self.frontend_port_range_end = kwargs.get('frontend_port_range_end', None)
self.backend_port = kwargs.get('backend_port', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.enable_floating_ip = kwargs.get('enable_floating_ip', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.provisioning_state = None
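# Illustrative sketch (not part of the generated API surface): an inbound NAT
# pool maps a contiguous range of frontend ports onto a single backend port of
# scale-set NICs. The port numbers and resource ID below are hypothetical.
def _example_inbound_nat_pool():
    """Build a sample InboundNatPool forwarding ports 50000-50119 to 22."""
    frontend = SubResource(
        id='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
           'Microsoft.Network/loadBalancers/<lb>/frontendIPConfigurations/<fe>')
    return InboundNatPool(
        name='ssh-nat-pool',
        frontend_ip_configuration=frontend,
        protocol='Tcp',
        frontend_port_range_start=50000,
        frontend_port_range_end=50119,
        backend_port=22,
        idle_timeout_in_minutes=4,
        enable_tcp_reset=True,
    )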
class InboundNatRule(SubResource):
"""Inbound NAT rule of the load balancer.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of inbound NAT rules used
by the load balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param frontend_ip_configuration: A reference to frontend IP addresses.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar backend_ip_configuration: A reference to a private IP address defined on a network
interface of a VM. Traffic sent to the frontend port of each of the frontend IP configurations
is forwarded to the backend IP.
:vartype backend_ip_configuration:
~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfiguration
:param protocol: The reference to the transport protocol used by the load balancing rule.
Possible values include: "Udp", "Tcp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.TransportProtocol
:param frontend_port: The port for the external endpoint. Port numbers for each rule must be
unique within the Load Balancer. Acceptable values range from 1 to 65534.
:type frontend_port: int
:param backend_port: The port used for the internal endpoint. Acceptable values range from 1 to
65535.
:type backend_port: int
:param idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
protocol is set to TCP.
:type idle_timeout_in_minutes: int
:param enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
capability required to configure a SQL AlwaysOn Availability Group. This setting is required
when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
after you create the endpoint.
:type enable_floating_ip: bool
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:ivar provisioning_state: The provisioning state of the inbound NAT rule resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'backend_ip_configuration': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'backend_ip_configuration': {'key': 'properties.backendIPConfiguration', 'type': 'NetworkInterfaceIPConfiguration'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'frontend_port': {'key': 'properties.frontendPort', 'type': 'int'},
'backend_port': {'key': 'properties.backendPort', 'type': 'int'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'enable_floating_ip': {'key': 'properties.enableFloatingIP', 'type': 'bool'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(InboundNatRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.backend_ip_configuration = None
self.protocol = kwargs.get('protocol', None)
self.frontend_port = kwargs.get('frontend_port', None)
self.backend_port = kwargs.get('backend_port', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.enable_floating_ip = kwargs.get('enable_floating_ip', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.provisioning_state = None
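# Illustrative sketch (not part of the generated API surface): unlike the pool
# above, an inbound NAT rule forwards exactly one frontend port to one backend
# port. The values and resource ID below are hypothetical.
def _example_inbound_nat_rule():
    """Build a sample InboundNatRule forwarding frontend port 3389 to 3389."""
    frontend = SubResource(
        id='/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
           'Microsoft.Network/loadBalancers/<lb>/frontendIPConfigurations/<fe>')
    return InboundNatRule(
        name='rdp-nat-rule',
        frontend_ip_configuration=frontend,
        protocol='Tcp',
        frontend_port=3389,
        backend_port=3389,
        enable_floating_ip=False,
    )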
class InboundNatRuleListResult(msrest.serialization.Model):
"""Response for ListInboundNatRule API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of inbound nat rules in a load balancer.
:type value: list[~azure.mgmt.network.v2019_09_01.models.InboundNatRule]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[InboundNatRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(InboundNatRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class IPAddressAvailabilityResult(msrest.serialization.Model):
"""Response for CheckIPAddressAvailability API service call.
:param available: Private IP address availability.
:type available: bool
:param available_ip_addresses: Contains other available private IP addresses if the asked-for
 address is taken.
:type available_ip_addresses: list[str]
"""
_attribute_map = {
'available': {'key': 'available', 'type': 'bool'},
'available_ip_addresses': {'key': 'availableIPAddresses', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(IPAddressAvailabilityResult, self).__init__(**kwargs)
self.available = kwargs.get('available', None)
self.available_ip_addresses = kwargs.get('available_ip_addresses', None)
class IPConfiguration(SubResource):
"""IP configuration.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.Subnet
:param public_ip_address: The reference of the public IP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_09_01.models.PublicIPAddress
:ivar provisioning_state: The provisioning state of the IP configuration resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
class IPConfigurationProfile(SubResource):
"""IP configuration profile child resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource. This name can be used to access the resource.
:type name: str
:ivar type: Sub Resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param subnet: The reference of the subnet resource to create a container network interface ip
configuration.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.Subnet
:ivar provisioning_state: The provisioning state of the IP configuration profile resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IPConfigurationProfile, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.subnet = kwargs.get('subnet', None)
self.provisioning_state = None
class IpGroup(Resource):
"""The IpGroups resource information.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the IpGroups resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param ip_addresses: IpAddresses/IpAddressPrefixes in the IpGroups resource.
:type ip_addresses: list[str]
:ivar firewalls: List of references to Azure resources that this IpGroups is associated with.
:vartype firewalls: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'firewalls': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'ip_addresses': {'key': 'properties.ipAddresses', 'type': '[str]'},
'firewalls': {'key': 'properties.firewalls', 'type': '[SubResource]'},
}
def __init__(
self,
**kwargs
):
super(IpGroup, self).__init__(**kwargs)
self.etag = None
self.provisioning_state = None
self.ip_addresses = kwargs.get('ip_addresses', None)
self.firewalls = None
class IpGroupListResult(msrest.serialization.Model):
"""Response for the ListIpGroups API service call.
:param value: The list of IpGroups information resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.IpGroup]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[IpGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IpGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class IpsecPolicy(msrest.serialization.Model):
"""An IPSec Policy configuration for a virtual network gateway connection.
All required parameters must be populated in order to send to Azure.
:param sa_life_time_seconds: Required. The IPSec Security Association (also called Quick Mode
or Phase 2 SA) lifetime in seconds for a site to site VPN tunnel.
:type sa_life_time_seconds: int
:param sa_data_size_kilobytes: Required. The IPSec Security Association (also called Quick Mode
or Phase 2 SA) payload size in KB for a site to site VPN tunnel.
:type sa_data_size_kilobytes: int
:param ipsec_encryption: Required. The IPSec encryption algorithm (IKE phase 2). Possible
values include: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192",
"GCMAES256".
:type ipsec_encryption: str or ~azure.mgmt.network.v2019_09_01.models.IpsecEncryption
:param ipsec_integrity: Required. The IPSec integrity algorithm (IKE phase 2). Possible values
include: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", "GCMAES256".
:type ipsec_integrity: str or ~azure.mgmt.network.v2019_09_01.models.IpsecIntegrity
:param ike_encryption: Required. The IKE encryption algorithm (IKE phase 1). Possible values
include: "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", "GCMAES128".
:type ike_encryption: str or ~azure.mgmt.network.v2019_09_01.models.IkeEncryption
:param ike_integrity: Required. The IKE integrity algorithm (IKE phase 1). Possible values
include: "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", "GCMAES128".
:type ike_integrity: str or ~azure.mgmt.network.v2019_09_01.models.IkeIntegrity
:param dh_group: Required. The DH Group used in IKE Phase 1 for initial SA. Possible values
include: "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384",
"DHGroup24".
:type dh_group: str or ~azure.mgmt.network.v2019_09_01.models.DhGroup
:param pfs_group: Required. The Pfs Group used in IKE Phase 2 for new child SA. Possible values
include: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", "PFSMM".
:type pfs_group: str or ~azure.mgmt.network.v2019_09_01.models.PfsGroup
"""
_validation = {
'sa_life_time_seconds': {'required': True},
'sa_data_size_kilobytes': {'required': True},
'ipsec_encryption': {'required': True},
'ipsec_integrity': {'required': True},
'ike_encryption': {'required': True},
'ike_integrity': {'required': True},
'dh_group': {'required': True},
'pfs_group': {'required': True},
}
_attribute_map = {
'sa_life_time_seconds': {'key': 'saLifeTimeSeconds', 'type': 'int'},
'sa_data_size_kilobytes': {'key': 'saDataSizeKilobytes', 'type': 'int'},
'ipsec_encryption': {'key': 'ipsecEncryption', 'type': 'str'},
'ipsec_integrity': {'key': 'ipsecIntegrity', 'type': 'str'},
'ike_encryption': {'key': 'ikeEncryption', 'type': 'str'},
'ike_integrity': {'key': 'ikeIntegrity', 'type': 'str'},
'dh_group': {'key': 'dhGroup', 'type': 'str'},
'pfs_group': {'key': 'pfsGroup', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IpsecPolicy, self).__init__(**kwargs)
self.sa_life_time_seconds = kwargs['sa_life_time_seconds']
self.sa_data_size_kilobytes = kwargs['sa_data_size_kilobytes']
self.ipsec_encryption = kwargs['ipsec_encryption']
self.ipsec_integrity = kwargs['ipsec_integrity']
self.ike_encryption = kwargs['ike_encryption']
self.ike_integrity = kwargs['ike_integrity']
self.dh_group = kwargs['dh_group']
self.pfs_group = kwargs['pfs_group']
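# Illustrative sketch (not part of the generated API surface): every field of
# IpsecPolicy is required, so a custom policy must set all eight values. The
# algorithm choices below are hypothetical examples drawn from the documented
# possible values.
def _example_ipsec_policy():
    """Build a sample IpsecPolicy with all required parameters populated."""
    return IpsecPolicy(
        sa_life_time_seconds=3600,
        sa_data_size_kilobytes=102400000,
        ipsec_encryption='AES256',
        ipsec_integrity='SHA256',
        ike_encryption='AES256',
        ike_integrity='SHA256',
        dh_group='DHGroup14',
        pfs_group='PFS14',
    )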
class IpTag(msrest.serialization.Model):
"""Contains the IpTag associated with the object.
:param ip_tag_type: The IP tag type. Example: FirstPartyUsage.
:type ip_tag_type: str
:param tag: The value of the IP tag associated with the public IP. Example: SQL.
:type tag: str
"""
_attribute_map = {
'ip_tag_type': {'key': 'ipTagType', 'type': 'str'},
'tag': {'key': 'tag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(IpTag, self).__init__(**kwargs)
self.ip_tag_type = kwargs.get('ip_tag_type', None)
self.tag = kwargs.get('tag', None)
class Ipv6ExpressRouteCircuitPeeringConfig(msrest.serialization.Model):
"""Contains IPv6 peering config.
:param primary_peer_address_prefix: The primary address prefix.
:type primary_peer_address_prefix: str
:param secondary_peer_address_prefix: The secondary address prefix.
:type secondary_peer_address_prefix: str
:param microsoft_peering_config: The Microsoft peering configuration.
:type microsoft_peering_config:
~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeeringConfig
:param route_filter: The reference of the RouteFilter resource.
:type route_filter: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param state: The state of peering. Possible values include: "Disabled", "Enabled".
:type state: str or ~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeeringState
"""
_attribute_map = {
'primary_peer_address_prefix': {'key': 'primaryPeerAddressPrefix', 'type': 'str'},
'secondary_peer_address_prefix': {'key': 'secondaryPeerAddressPrefix', 'type': 'str'},
'microsoft_peering_config': {'key': 'microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'},
'route_filter': {'key': 'routeFilter', 'type': 'SubResource'},
'state': {'key': 'state', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Ipv6ExpressRouteCircuitPeeringConfig, self).__init__(**kwargs)
self.primary_peer_address_prefix = kwargs.get('primary_peer_address_prefix', None)
self.secondary_peer_address_prefix = kwargs.get('secondary_peer_address_prefix', None)
self.microsoft_peering_config = kwargs.get('microsoft_peering_config', None)
self.route_filter = kwargs.get('route_filter', None)
self.state = kwargs.get('state', None)
class ListHubVirtualNetworkConnectionsResult(msrest.serialization.Model):
"""List of HubVirtualNetworkConnections and a URL nextLink to get the next set of results.
:param value: List of HubVirtualNetworkConnections.
:type value: list[~azure.mgmt.network.v2019_09_01.models.HubVirtualNetworkConnection]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[HubVirtualNetworkConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListHubVirtualNetworkConnectionsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListP2SVpnGatewaysResult(msrest.serialization.Model):
"""Result of the request to list P2SVpnGateways. It contains a list of P2SVpnGateways and a URL nextLink to get the next set of results.
:param value: List of P2SVpnGateways.
:type value: list[~azure.mgmt.network.v2019_09_01.models.P2SVpnGateway]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[P2SVpnGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListP2SVpnGatewaysResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVirtualHubRouteTableV2SResult(msrest.serialization.Model):
"""List of VirtualHubRouteTableV2s and a URL nextLink to get the next set of results.
:param value: List of VirtualHubRouteTableV2s.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualHubRouteTableV2]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualHubRouteTableV2]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVirtualHubRouteTableV2SResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVirtualHubsResult(msrest.serialization.Model):
"""Result of the request to list VirtualHubs. It contains a list of VirtualHubs and a URL nextLink to get the next set of results.
:param value: List of VirtualHubs.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualHub]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualHub]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVirtualHubsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVirtualWANsResult(msrest.serialization.Model):
"""Result of the request to list VirtualWANs. It contains a list of VirtualWANs and a URL nextLink to get the next set of results.
:param value: List of VirtualWANs.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualWAN]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualWAN]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVirtualWANsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnConnectionsResult(msrest.serialization.Model):
"""Result of the request to list all vpn connections to a virtual wan vpn gateway. It contains a list of Vpn Connections and a URL nextLink to get the next set of results.
:param value: List of Vpn Connections.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VpnConnection]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnConnectionsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnGatewaysResult(msrest.serialization.Model):
"""Result of the request to list VpnGateways. It contains a list of VpnGateways and a URL nextLink to get the next set of results.
:param value: List of VpnGateways.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VpnGateway]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnGatewaysResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnServerConfigurationsResult(msrest.serialization.Model):
"""Result of the request to list all VpnServerConfigurations. It contains a list of VpnServerConfigurations and a URL nextLink to get the next set of results.
:param value: List of VpnServerConfigurations.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VpnServerConfiguration]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnServerConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnServerConfigurationsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnSiteLinkConnectionsResult(msrest.serialization.Model):
"""Result of the request to list all vpn connections to a virtual wan vpn gateway. It contains a list of Vpn Connections and a URL nextLink to get the next set of results.
:param value: List of VpnSiteLinkConnections.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VpnSiteLinkConnection]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnSiteLinkConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnSiteLinkConnectionsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnSiteLinksResult(msrest.serialization.Model):
"""Result of the request to list VpnSiteLinks. It contains a list of VpnSiteLinks and a URL nextLink to get the next set of results.
:param value: List of VpnSiteLinks.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VpnSiteLink]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnSiteLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnSiteLinksResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ListVpnSitesResult(msrest.serialization.Model):
"""Result of the request to list VpnSites. It contains a list of VpnSites and a URL nextLink to get the next set of results.
:param value: List of VpnSites.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VpnSite]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnSite]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ListVpnSitesResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class LoadBalancer(Resource):
"""LoadBalancer resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The load balancer SKU.
:type sku: ~azure.mgmt.network.v2019_09_01.models.LoadBalancerSku
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param frontend_ip_configurations: Object representing the frontend IPs to be used for the load
balancer.
:type frontend_ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.FrontendIPConfiguration]
:param backend_address_pools: Collection of backend address pools used by a load balancer.
:type backend_address_pools: list[~azure.mgmt.network.v2019_09_01.models.BackendAddressPool]
:param load_balancing_rules: Object collection representing the load balancing rules of the
load balancer.
:type load_balancing_rules: list[~azure.mgmt.network.v2019_09_01.models.LoadBalancingRule]
:param probes: Collection of probe objects used in the load balancer.
:type probes: list[~azure.mgmt.network.v2019_09_01.models.Probe]
:param inbound_nat_rules: Collection of inbound NAT Rules used by a load balancer. Defining
inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT
pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are
associated with individual virtual machines cannot reference an Inbound NAT pool. They have to
reference individual inbound NAT rules.
:type inbound_nat_rules: list[~azure.mgmt.network.v2019_09_01.models.InboundNatRule]
:param inbound_nat_pools: Defines an external port range for inbound NAT to a single backend
port on NICs associated with a load balancer. Inbound NAT rules are created automatically for
each NIC associated with the Load Balancer using an external port from this range. Defining an
Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules.
Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with
individual virtual machines cannot reference an inbound NAT pool. They have to reference
individual inbound NAT rules.
:type inbound_nat_pools: list[~azure.mgmt.network.v2019_09_01.models.InboundNatPool]
:param outbound_rules: The outbound rules.
:type outbound_rules: list[~azure.mgmt.network.v2019_09_01.models.OutboundRule]
:ivar resource_guid: The resource GUID property of the load balancer resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the load balancer resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'LoadBalancerSku'},
'etag': {'key': 'etag', 'type': 'str'},
'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[FrontendIPConfiguration]'},
'backend_address_pools': {'key': 'properties.backendAddressPools', 'type': '[BackendAddressPool]'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[LoadBalancingRule]'},
'probes': {'key': 'properties.probes', 'type': '[Probe]'},
'inbound_nat_rules': {'key': 'properties.inboundNatRules', 'type': '[InboundNatRule]'},
'inbound_nat_pools': {'key': 'properties.inboundNatPools', 'type': '[InboundNatPool]'},
'outbound_rules': {'key': 'properties.outboundRules', 'type': '[OutboundRule]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancer, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = None
self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None)
self.backend_address_pools = kwargs.get('backend_address_pools', None)
self.load_balancing_rules = kwargs.get('load_balancing_rules', None)
self.probes = kwargs.get('probes', None)
self.inbound_nat_rules = kwargs.get('inbound_nat_rules', None)
self.inbound_nat_pools = kwargs.get('inbound_nat_pools', None)
self.outbound_rules = kwargs.get('outbound_rules', None)
self.resource_guid = None
self.provisioning_state = None
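# Illustrative sketch (not part of the generated API surface): a load balancer
# is composed from the child models defined elsewhere in this module. Names and
# values below are hypothetical; load balancing rules, inbound NAT rules, or an
# inbound NAT pool (the latter two are mutually exclusive) could be added in
# the same way.
def _example_load_balancer():
    """Assemble a minimal Standard LoadBalancer with one frontend and pool."""
    return LoadBalancer(
        location='westus2',
        sku=LoadBalancerSku(name='Standard'),
        frontend_ip_configurations=[FrontendIPConfiguration(name='frontend1')],
        backend_address_pools=[BackendAddressPool(name='backendpool1')],
        probes=[Probe(name='http-probe', protocol='Http', port=80,
                      request_path='/healthz')],
    )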
class LoadBalancerBackendAddressPoolListResult(msrest.serialization.Model):
"""Response for ListBackendAddressPool API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of backend address pools in a load balancer.
:type value: list[~azure.mgmt.network.v2019_09_01.models.BackendAddressPool]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[BackendAddressPool]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerBackendAddressPoolListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerFrontendIPConfigurationListResult(msrest.serialization.Model):
"""Response for ListFrontendIPConfiguration API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of frontend IP configurations in a load balancer.
:type value: list[~azure.mgmt.network.v2019_09_01.models.FrontendIPConfiguration]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[FrontendIPConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerFrontendIPConfigurationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerListResult(msrest.serialization.Model):
"""Response for ListLoadBalancers API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of load balancers in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.LoadBalancer]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LoadBalancer]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerLoadBalancingRuleListResult(msrest.serialization.Model):
"""Response for ListLoadBalancingRule API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of load balancing rules in a load balancer.
:type value: list[~azure.mgmt.network.v2019_09_01.models.LoadBalancingRule]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LoadBalancingRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerLoadBalancingRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerOutboundRuleListResult(msrest.serialization.Model):
"""Response for ListOutboundRule API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of outbound rules in a load balancer.
:type value: list[~azure.mgmt.network.v2019_09_01.models.OutboundRule]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[OutboundRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerOutboundRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerProbeListResult(msrest.serialization.Model):
"""Response for ListProbe API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of probes in a load balancer.
:type value: list[~azure.mgmt.network.v2019_09_01.models.Probe]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Probe]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerProbeListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LoadBalancerSku(msrest.serialization.Model):
"""SKU of a load balancer.
:param name: Name of a load balancer SKU. Possible values include: "Basic", "Standard".
:type name: str or ~azure.mgmt.network.v2019_09_01.models.LoadBalancerSkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancerSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class LoadBalancingRule(SubResource):
"""A load balancing rule for a load balancer.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of load balancing rules
used by the load balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param frontend_ip_configuration: A reference to frontend IP addresses.
:type frontend_ip_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param backend_address_pool: A reference to a pool of DIPs. Inbound traffic is randomly load
balanced across IPs in the backend pool.
:type backend_address_pool: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param probe: The reference of the load balancer probe used by the load balancing rule.
:type probe: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param protocol: The reference to the transport protocol used by the load balancing rule.
Possible values include: "Udp", "Tcp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.TransportProtocol
:param load_distribution: The load distribution policy for this rule. Possible values include:
"Default", "SourceIP", "SourceIPProtocol".
:type load_distribution: str or ~azure.mgmt.network.v2019_09_01.models.LoadDistribution
:param frontend_port: The port for the external endpoint. Port numbers for each rule must be
unique within the Load Balancer. Acceptable values are between 0 and 65534. Note that value 0
enables "Any Port".
:type frontend_port: int
:param backend_port: The port used for internal connections on the endpoint. Acceptable values
are between 0 and 65535. Note that value 0 enables "Any Port".
:type backend_port: int
:param idle_timeout_in_minutes: The timeout for the TCP idle connection. The value can be set
between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the
protocol is set to TCP.
:type idle_timeout_in_minutes: int
:param enable_floating_ip: Configures a virtual machine's endpoint for the floating IP
capability required to configure a SQL AlwaysOn Availability Group. This setting is required
when using the SQL AlwaysOn Availability Groups in SQL server. This setting can't be changed
after you create the endpoint.
:type enable_floating_ip: bool
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:param disable_outbound_snat: Configures SNAT for the VMs in the backend pool to use the
publicIP address specified in the frontend of the load balancing rule.
:type disable_outbound_snat: bool
:ivar provisioning_state: The provisioning state of the load balancing rule resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'frontend_ip_configuration': {'key': 'properties.frontendIPConfiguration', 'type': 'SubResource'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'probe': {'key': 'properties.probe', 'type': 'SubResource'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'load_distribution': {'key': 'properties.loadDistribution', 'type': 'str'},
'frontend_port': {'key': 'properties.frontendPort', 'type': 'int'},
'backend_port': {'key': 'properties.backendPort', 'type': 'int'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'enable_floating_ip': {'key': 'properties.enableFloatingIP', 'type': 'bool'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'disable_outbound_snat': {'key': 'properties.disableOutboundSnat', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LoadBalancingRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.frontend_ip_configuration = kwargs.get('frontend_ip_configuration', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.probe = kwargs.get('probe', None)
self.protocol = kwargs.get('protocol', None)
self.load_distribution = kwargs.get('load_distribution', None)
self.frontend_port = kwargs.get('frontend_port', None)
self.backend_port = kwargs.get('backend_port', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.enable_floating_ip = kwargs.get('enable_floating_ip', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.disable_outbound_snat = kwargs.get('disable_outbound_snat', None)
self.provisioning_state = None
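# Illustrative sketch (not part of the generated API surface): setting both
# frontend_port and backend_port to 0 enables the "Any Port" behaviour
# described above; the resource IDs are hypothetical placeholders.
def _example_any_port_load_balancing_rule():
    """Build a sample LoadBalancingRule that forwards all ports (port 0)."""
    lb_id = ('/subscriptions/<sub-id>/resourceGroups/<rg>/providers/'
             'Microsoft.Network/loadBalancers/<lb>')
    return LoadBalancingRule(
        name='any-port-rule',
        frontend_ip_configuration=SubResource(
            id=lb_id + '/frontendIPConfigurations/<fe>'),
        backend_address_pool=SubResource(
            id=lb_id + '/backendAddressPools/<pool>'),
        protocol='All',
        frontend_port=0,
        backend_port=0,
        load_distribution='Default',
        enable_floating_ip=True,
    )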
class LocalNetworkGateway(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param local_network_address_space: Local network site address space.
:type local_network_address_space: ~azure.mgmt.network.v2019_09_01.models.AddressSpace
:param gateway_ip_address: IP address of local network gateway.
:type gateway_ip_address: str
:param bgp_settings: Local network gateway's BGP speaker settings.
:type bgp_settings: ~azure.mgmt.network.v2019_09_01.models.BgpSettings
:ivar resource_guid: The resource GUID property of the local network gateway resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the local network gateway resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'local_network_address_space': {'key': 'properties.localNetworkAddressSpace', 'type': 'AddressSpace'},
'gateway_ip_address': {'key': 'properties.gatewayIpAddress', 'type': 'str'},
'bgp_settings': {'key': 'properties.bgpSettings', 'type': 'BgpSettings'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LocalNetworkGateway, self).__init__(**kwargs)
self.etag = None
self.local_network_address_space = kwargs.get('local_network_address_space', None)
self.gateway_ip_address = kwargs.get('gateway_ip_address', None)
self.bgp_settings = kwargs.get('bgp_settings', None)
self.resource_guid = None
self.provisioning_state = None
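# Illustrative sketch (not part of the generated API surface): a local network
# gateway describes the on-premises side of a VPN connection by its public IP
# address and address space; the addresses and ASN below are hypothetical.
def _example_local_network_gateway():
    """Build a sample LocalNetworkGateway for an on-premises site."""
    return LocalNetworkGateway(
        location='westus2',
        gateway_ip_address='203.0.113.10',
        local_network_address_space=AddressSpace(
            address_prefixes=['10.10.0.0/16']),
        bgp_settings=BgpSettings(asn=65010, bgp_peering_address='10.10.0.254'),
    )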
class LocalNetworkGatewayListResult(msrest.serialization.Model):
"""Response for ListLocalNetworkGateways API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of local network gateways that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.LocalNetworkGateway]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LocalNetworkGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LocalNetworkGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class LogSpecification(msrest.serialization.Model):
"""Description of logging specification.
:param name: The name of the specification.
:type name: str
:param display_name: The display name of the specification.
:type display_name: str
:param blob_duration: Duration of the blob.
:type blob_duration: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'blob_duration': {'key': 'blobDuration', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(LogSpecification, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.blob_duration = kwargs.get('blob_duration', None)
class ManagedRuleGroupOverride(msrest.serialization.Model):
"""Defines a managed rule group override setting.
All required parameters must be populated in order to send to Azure.
:param rule_group_name: Required. Describes the managed rule group to override.
:type rule_group_name: str
:param rules: List of rules that will be disabled. If none specified, all rules in the group
will be disabled.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.ManagedRuleOverride]
"""
_validation = {
'rule_group_name': {'required': True},
}
_attribute_map = {
'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[ManagedRuleOverride]'},
}
def __init__(
self,
**kwargs
):
super(ManagedRuleGroupOverride, self).__init__(**kwargs)
self.rule_group_name = kwargs['rule_group_name']
self.rules = kwargs.get('rules', None)
class ManagedRuleOverride(msrest.serialization.Model):
"""Defines a managed rule group override setting.
All required parameters must be populated in order to send to Azure.
:param rule_id: Required. Identifier for the managed rule.
:type rule_id: str
:param state: Describes the state of the managed rule. Defaults to Disabled if not specified.
Possible values include: "Disabled".
:type state: str or ~azure.mgmt.network.v2019_09_01.models.ManagedRuleEnabledState
"""
_validation = {
'rule_id': {'required': True},
}
_attribute_map = {
'rule_id': {'key': 'ruleId', 'type': 'str'},
'state': {'key': 'state', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ManagedRuleOverride, self).__init__(**kwargs)
self.rule_id = kwargs['rule_id']
self.state = kwargs.get('state', None)
class ManagedRulesDefinition(msrest.serialization.Model):
"""Allow to exclude some variable satisfy the condition for the WAF check.
All required parameters must be populated in order to send to Azure.
:param exclusions: Describes the Exclusions that are applied on the policy.
:type exclusions: list[~azure.mgmt.network.v2019_09_01.models.OwaspCrsExclusionEntry]
:param managed_rule_sets: Required. Describes the ruleSets that are associated with the policy.
:type managed_rule_sets: list[~azure.mgmt.network.v2019_09_01.models.ManagedRuleSet]
"""
_validation = {
'managed_rule_sets': {'required': True},
}
_attribute_map = {
'exclusions': {'key': 'exclusions', 'type': '[OwaspCrsExclusionEntry]'},
'managed_rule_sets': {'key': 'managedRuleSets', 'type': '[ManagedRuleSet]'},
}
def __init__(
self,
**kwargs
):
super(ManagedRulesDefinition, self).__init__(**kwargs)
self.exclusions = kwargs.get('exclusions', None)
self.managed_rule_sets = kwargs['managed_rule_sets']
class ManagedRuleSet(msrest.serialization.Model):
"""Defines a managed rule set.
All required parameters must be populated in order to send to Azure.
:param rule_set_type: Required. Defines the rule set type to use.
:type rule_set_type: str
:param rule_set_version: Required. Defines the version of the rule set to use.
:type rule_set_version: str
:param rule_group_overrides: Defines the rule group overrides to apply to the rule set.
:type rule_group_overrides:
list[~azure.mgmt.network.v2019_09_01.models.ManagedRuleGroupOverride]
"""
_validation = {
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
}
_attribute_map = {
'rule_set_type': {'key': 'ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'ruleSetVersion', 'type': 'str'},
'rule_group_overrides': {'key': 'ruleGroupOverrides', 'type': '[ManagedRuleGroupOverride]'},
}
def __init__(
self,
**kwargs
):
super(ManagedRuleSet, self).__init__(**kwargs)
self.rule_set_type = kwargs['rule_set_type']
self.rule_set_version = kwargs['rule_set_version']
self.rule_group_overrides = kwargs.get('rule_group_overrides', None)
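# Illustrative sketch (not part of the generated API surface): the models above
# compose a WAF managed rules definition in which one rule of one rule group is
# disabled. The rule set type/version, group name, and rule ID are hypothetical
# examples.
def _example_managed_rules_definition():
    """Build a ManagedRulesDefinition that disables a single managed rule."""
    override = ManagedRuleGroupOverride(
        rule_group_name='REQUEST-942-APPLICATION-ATTACK-SQLI',
        rules=[ManagedRuleOverride(rule_id='942100', state='Disabled')],
    )
    return ManagedRulesDefinition(
        managed_rule_sets=[
            ManagedRuleSet(
                rule_set_type='OWASP',
                rule_set_version='3.1',
                rule_group_overrides=[override],
            ),
        ],
    )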
class ManagedServiceIdentity(msrest.serialization.Model):
"""Identity for the resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar principal_id: The principal id of the system assigned identity. This property will only
be provided for a system assigned identity.
:vartype principal_id: str
:ivar tenant_id: The tenant id of the system assigned identity. This property will only be
provided for a system assigned identity.
:vartype tenant_id: str
:param type: The type of identity used for the resource. The type 'SystemAssigned,
UserAssigned' includes both an implicitly created identity and a set of user assigned
identities. The type 'None' will remove any identities from the virtual machine. Possible
values include: "SystemAssigned", "UserAssigned", "SystemAssigned, UserAssigned", "None".
:type type: str or ~azure.mgmt.network.v2019_09_01.models.ResourceIdentityType
:param user_assigned_identities: The list of user identities associated with the resource. The user
identity dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
:type user_assigned_identities: dict[str,
~azure.mgmt.network.v2019_09_01.models.Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties]
"""
_validation = {
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
}
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties}'},
}
def __init__(
self,
**kwargs
):
super(ManagedServiceIdentity, self).__init__(**kwargs)
self.principal_id = None
self.tenant_id = None
self.type = kwargs.get('type', None)
self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
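# Illustrative sketch (not part of the generated API surface): for a
# system-assigned identity only the type needs to be set; principal_id and
# tenant_id are populated by the server.
def _example_system_assigned_identity():
    """Build a ManagedServiceIdentity requesting a system-assigned identity."""
    return ManagedServiceIdentity(type='SystemAssigned')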
class MatchCondition(msrest.serialization.Model):
"""Define match conditions.
All required parameters must be populated in order to send to Azure.
:param match_variables: Required. List of match variables.
:type match_variables: list[~azure.mgmt.network.v2019_09_01.models.MatchVariable]
:param operator: Required. Describes operator to be matched. Possible values include:
"IPMatch", "Equal", "Contains", "LessThan", "GreaterThan", "LessThanOrEqual",
"GreaterThanOrEqual", "BeginsWith", "EndsWith", "Regex", "GeoMatch".
:type operator: str or ~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallOperator
:param negation_conditon: Whether this is a negate condition.
:type negation_conditon: bool
:param match_values: Required. Match value.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or
~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallTransform]
"""
_validation = {
'match_variables': {'required': True},
'operator': {'required': True},
'match_values': {'required': True},
}
_attribute_map = {
'match_variables': {'key': 'matchVariables', 'type': '[MatchVariable]'},
'operator': {'key': 'operator', 'type': 'str'},
'negation_conditon': {'key': 'negationConditon', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(MatchCondition, self).__init__(**kwargs)
self.match_variables = kwargs['match_variables']
self.operator = kwargs['operator']
self.negation_conditon = kwargs.get('negation_conditon', None)
self.match_values = kwargs['match_values']
self.transforms = kwargs.get('transforms', None)
class MatchedRule(msrest.serialization.Model):
"""Matched rule.
:param rule_name: Name of the matched network security rule.
:type rule_name: str
:param action: The network traffic is allowed or denied. Possible values are 'Allow' and
'Deny'.
:type action: str
"""
_attribute_map = {
'rule_name': {'key': 'ruleName', 'type': 'str'},
'action': {'key': 'action', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MatchedRule, self).__init__(**kwargs)
self.rule_name = kwargs.get('rule_name', None)
self.action = kwargs.get('action', None)
class MatchVariable(msrest.serialization.Model):
"""Define match variables.
All required parameters must be populated in order to send to Azure.
:param variable_name: Required. Match Variable. Possible values include: "RemoteAddr",
"RequestMethod", "QueryString", "PostArgs", "RequestUri", "RequestHeaders", "RequestBody",
"RequestCookies".
:type variable_name: str or
~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallMatchVariable
:param selector: Describes the field of the matchVariable collection.
:type selector: str
"""
_validation = {
'variable_name': {'required': True},
}
_attribute_map = {
'variable_name': {'key': 'variableName', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MatchVariable, self).__init__(**kwargs)
self.variable_name = kwargs['variable_name']
self.selector = kwargs.get('selector', None)
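# Illustrative sketch (not part of the generated client): building a WAF match condition
# from the MatchCondition and MatchVariable models above. The header name and matched
# strings are hypothetical placeholders; note that the misspelled 'negation_conditon'
# attribute name comes from the service schema and must be used as-is.
def _example_waf_match_condition():
    return MatchCondition(
        match_variables=[MatchVariable(variable_name="RequestHeaders", selector="User-Agent")],
        operator="Contains",
        negation_conditon=False,
        match_values=["badbot"],
        transforms=["Lowercase"],
    )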
class MetricSpecification(msrest.serialization.Model):
"""Description of metrics specification.
:param name: The name of the metric.
:type name: str
:param display_name: The display name of the metric.
:type display_name: str
:param display_description: The description of the metric.
:type display_description: str
:param unit: The unit in which the metric is displayed.
:type unit: str
:param aggregation_type: The aggregation type.
:type aggregation_type: str
:param availabilities: List of availability.
:type availabilities: list[~azure.mgmt.network.v2019_09_01.models.Availability]
:param enable_regional_mdm_account: Whether the regional MDM account is enabled.
:type enable_regional_mdm_account: bool
:param fill_gap_with_zero: Whether gaps would be filled with zeros.
:type fill_gap_with_zero: bool
:param metric_filter_pattern: Pattern for the filter of the metric.
:type metric_filter_pattern: str
:param dimensions: List of dimensions.
:type dimensions: list[~azure.mgmt.network.v2019_09_01.models.Dimension]
:param is_internal: Whether the metric is internal.
:type is_internal: bool
:param source_mdm_account: The source MDM account.
:type source_mdm_account: str
:param source_mdm_namespace: The source MDM namespace.
:type source_mdm_namespace: str
:param resource_id_dimension_name_override: The resource Id dimension name override.
:type resource_id_dimension_name_override: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'display_description': {'key': 'displayDescription', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'aggregation_type': {'key': 'aggregationType', 'type': 'str'},
'availabilities': {'key': 'availabilities', 'type': '[Availability]'},
'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'bool'},
'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'},
'metric_filter_pattern': {'key': 'metricFilterPattern', 'type': 'str'},
'dimensions': {'key': 'dimensions', 'type': '[Dimension]'},
'is_internal': {'key': 'isInternal', 'type': 'bool'},
'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'},
'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'},
'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MetricSpecification, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display_name = kwargs.get('display_name', None)
self.display_description = kwargs.get('display_description', None)
self.unit = kwargs.get('unit', None)
self.aggregation_type = kwargs.get('aggregation_type', None)
self.availabilities = kwargs.get('availabilities', None)
self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None)
self.fill_gap_with_zero = kwargs.get('fill_gap_with_zero', None)
self.metric_filter_pattern = kwargs.get('metric_filter_pattern', None)
self.dimensions = kwargs.get('dimensions', None)
self.is_internal = kwargs.get('is_internal', None)
self.source_mdm_account = kwargs.get('source_mdm_account', None)
self.source_mdm_namespace = kwargs.get('source_mdm_namespace', None)
self.resource_id_dimension_name_override = kwargs.get('resource_id_dimension_name_override', None)
class NatGateway(Resource):
"""Nat Gateway resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The nat gateway SKU.
:type sku: ~azure.mgmt.network.v2019_09_01.models.NatGatewaySku
:param zones: A list of availability zones denoting the zone in which Nat Gateway should be
deployed.
:type zones: list[str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param idle_timeout_in_minutes: The idle timeout of the nat gateway.
:type idle_timeout_in_minutes: int
:param public_ip_addresses: An array of public ip addresses associated with the nat gateway
resource.
:type public_ip_addresses: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param public_ip_prefixes: An array of public ip prefixes associated with the nat gateway
resource.
:type public_ip_prefixes: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar subnets: An array of references to the subnets using this nat gateway resource.
:vartype subnets: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar resource_guid: The resource GUID property of the NAT gateway resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the NAT gateway resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'subnets': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'NatGatewaySku'},
'zones': {'key': 'zones', 'type': '[str]'},
'etag': {'key': 'etag', 'type': 'str'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'public_ip_addresses': {'key': 'properties.publicIpAddresses', 'type': '[SubResource]'},
'public_ip_prefixes': {'key': 'properties.publicIpPrefixes', 'type': '[SubResource]'},
'subnets': {'key': 'properties.subnets', 'type': '[SubResource]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NatGateway, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.zones = kwargs.get('zones', None)
self.etag = None
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.public_ip_addresses = kwargs.get('public_ip_addresses', None)
self.public_ip_prefixes = kwargs.get('public_ip_prefixes', None)
self.subnets = None
self.resource_guid = None
self.provisioning_state = None
class NatGatewayListResult(msrest.serialization.Model):
"""Response for ListNatGateways API service call.
:param value: A list of Nat Gateways that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.NatGateway]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NatGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NatGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class NatGatewaySku(msrest.serialization.Model):
"""SKU of nat gateway.
:param name: Name of Nat Gateway SKU. Possible values include: "Standard".
:type name: str or ~azure.mgmt.network.v2019_09_01.models.NatGatewaySkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NatGatewaySku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
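# Illustrative sketch (not part of the generated client): a minimal NatGateway payload with
# a Standard SKU and one public IP referenced by ID through the SubResource model defined
# elsewhere in this module. The subscription/resource-group segments are hypothetical
# placeholders.
def _example_nat_gateway():
    return NatGateway(
        location="westus2",
        sku=NatGatewaySku(name="Standard"),
        idle_timeout_in_minutes=4,
        public_ip_addresses=[
            SubResource(id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network/publicIPAddresses/<pip>"),
        ],
    )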
class NetworkConfigurationDiagnosticParameters(msrest.serialization.Model):
"""Parameters to get network configuration diagnostic.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The ID of the target resource to perform network
configuration diagnostic. Valid options are VM, NetworkInterface, VMSS/NetworkInterface and
Application Gateway.
:type target_resource_id: str
:param verbosity_level: Verbosity level. Possible values include: "Normal", "Minimum", "Full".
:type verbosity_level: str or ~azure.mgmt.network.v2019_09_01.models.VerbosityLevel
:param profiles: Required. List of network configuration diagnostic profiles.
:type profiles:
list[~azure.mgmt.network.v2019_09_01.models.NetworkConfigurationDiagnosticProfile]
"""
_validation = {
'target_resource_id': {'required': True},
'profiles': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'verbosity_level': {'key': 'verbosityLevel', 'type': 'str'},
'profiles': {'key': 'profiles', 'type': '[NetworkConfigurationDiagnosticProfile]'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.verbosity_level = kwargs.get('verbosity_level', None)
self.profiles = kwargs['profiles']
class NetworkConfigurationDiagnosticProfile(msrest.serialization.Model):
"""Parameters to compare with network configuration.
All required parameters must be populated in order to send to Azure.
:param direction: Required. The direction of the traffic. Possible values include: "Inbound",
"Outbound".
:type direction: str or ~azure.mgmt.network.v2019_09_01.models.Direction
:param protocol: Required. Protocol to be verified. Accepted values are '*', TCP, UDP.
:type protocol: str
:param source: Required. Traffic source. Accepted values are '*', IP Address/CIDR, Service Tag.
:type source: str
:param destination: Required. Traffic destination. Accepted values are: '*', IP Address/CIDR,
Service Tag.
:type destination: str
:param destination_port: Required. Traffic destination port. Accepted values are '*' and a
single port in the range (0 - 65535).
:type destination_port: str
"""
_validation = {
'direction': {'required': True},
'protocol': {'required': True},
'source': {'required': True},
'destination': {'required': True},
'destination_port': {'required': True},
}
_attribute_map = {
'direction': {'key': 'direction', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'destination': {'key': 'destination', 'type': 'str'},
'destination_port': {'key': 'destinationPort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticProfile, self).__init__(**kwargs)
self.direction = kwargs['direction']
self.protocol = kwargs['protocol']
self.source = kwargs['source']
self.destination = kwargs['destination']
self.destination_port = kwargs['destination_port']
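# Illustrative sketch (not part of the generated client): composing a network configuration
# diagnostic request from the two models above. The target resource ID is a hypothetical
# placeholder; the profile checks outbound HTTPS from any source to any destination.
def _example_network_configuration_diagnostic():
    profile = NetworkConfigurationDiagnosticProfile(
        direction="Outbound",
        protocol="TCP",
        source="*",
        destination="*",
        destination_port="443",
    )
    return NetworkConfigurationDiagnosticParameters(
        target_resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>",
        verbosity_level="Normal",
        profiles=[profile],
    )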
class NetworkConfigurationDiagnosticResponse(msrest.serialization.Model):
"""Results of network configuration diagnostic on the target resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar results: List of network configuration diagnostic results.
:vartype results:
list[~azure.mgmt.network.v2019_09_01.models.NetworkConfigurationDiagnosticResult]
"""
_validation = {
'results': {'readonly': True},
}
_attribute_map = {
'results': {'key': 'results', 'type': '[NetworkConfigurationDiagnosticResult]'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticResponse, self).__init__(**kwargs)
self.results = None
class NetworkConfigurationDiagnosticResult(msrest.serialization.Model):
"""Network configuration diagnostic result corresponded to provided traffic query.
:param profile: Network configuration diagnostic profile.
:type profile: ~azure.mgmt.network.v2019_09_01.models.NetworkConfigurationDiagnosticProfile
:param network_security_group_result: Network security group result.
:type network_security_group_result:
~azure.mgmt.network.v2019_09_01.models.NetworkSecurityGroupResult
"""
_attribute_map = {
'profile': {'key': 'profile', 'type': 'NetworkConfigurationDiagnosticProfile'},
'network_security_group_result': {'key': 'networkSecurityGroupResult', 'type': 'NetworkSecurityGroupResult'},
}
def __init__(
self,
**kwargs
):
super(NetworkConfigurationDiagnosticResult, self).__init__(**kwargs)
self.profile = kwargs.get('profile', None)
self.network_security_group_result = kwargs.get('network_security_group_result', None)
class NetworkIntentPolicy(Resource):
"""Network Intent Policy resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkIntentPolicy, self).__init__(**kwargs)
self.etag = None
class NetworkIntentPolicyConfiguration(msrest.serialization.Model):
"""Details of NetworkIntentPolicyConfiguration for PrepareNetworkPoliciesRequest.
:param network_intent_policy_name: The name of the Network Intent Policy for storing in target
subscription.
:type network_intent_policy_name: str
:param source_network_intent_policy: Source network intent policy.
:type source_network_intent_policy: ~azure.mgmt.network.v2019_09_01.models.NetworkIntentPolicy
"""
_attribute_map = {
'network_intent_policy_name': {'key': 'networkIntentPolicyName', 'type': 'str'},
'source_network_intent_policy': {'key': 'sourceNetworkIntentPolicy', 'type': 'NetworkIntentPolicy'},
}
def __init__(
self,
**kwargs
):
super(NetworkIntentPolicyConfiguration, self).__init__(**kwargs)
self.network_intent_policy_name = kwargs.get('network_intent_policy_name', None)
self.source_network_intent_policy = kwargs.get('source_network_intent_policy', None)
class NetworkInterface(Resource):
"""A network interface in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar virtual_machine: The reference of a virtual machine.
:vartype virtual_machine: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param network_security_group: The reference of the NetworkSecurityGroup resource.
:type network_security_group: ~azure.mgmt.network.v2019_09_01.models.NetworkSecurityGroup
:ivar private_endpoint: A reference to the private endpoint to which the network interface is
linked.
:vartype private_endpoint: ~azure.mgmt.network.v2019_09_01.models.PrivateEndpoint
:param ip_configurations: A list of IPConfigurations of the network interface.
:type ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfiguration]
:ivar tap_configurations: A list of TapConfigurations of the network interface.
:vartype tap_configurations:
list[~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceTapConfiguration]
:param dns_settings: The DNS settings in network interface.
:type dns_settings: ~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceDnsSettings
:ivar mac_address: The MAC address of the network interface.
:vartype mac_address: str
:ivar primary: Whether this is a primary network interface on a virtual machine.
:vartype primary: bool
:param enable_accelerated_networking: If the network interface is accelerated networking
enabled.
:type enable_accelerated_networking: bool
:param enable_ip_forwarding: Indicates whether IP forwarding is enabled on this network
interface.
:type enable_ip_forwarding: bool
:ivar hosted_workloads: A list of references to linked BareMetal resources.
:vartype hosted_workloads: list[str]
:ivar resource_guid: The resource GUID property of the network interface resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the network interface resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'virtual_machine': {'readonly': True},
'private_endpoint': {'readonly': True},
'tap_configurations': {'readonly': True},
'mac_address': {'readonly': True},
'primary': {'readonly': True},
'hosted_workloads': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_machine': {'key': 'properties.virtualMachine', 'type': 'SubResource'},
'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'},
'tap_configurations': {'key': 'properties.tapConfigurations', 'type': '[NetworkInterfaceTapConfiguration]'},
'dns_settings': {'key': 'properties.dnsSettings', 'type': 'NetworkInterfaceDnsSettings'},
'mac_address': {'key': 'properties.macAddress', 'type': 'str'},
'primary': {'key': 'properties.primary', 'type': 'bool'},
'enable_accelerated_networking': {'key': 'properties.enableAcceleratedNetworking', 'type': 'bool'},
'enable_ip_forwarding': {'key': 'properties.enableIPForwarding', 'type': 'bool'},
'hosted_workloads': {'key': 'properties.hostedWorkloads', 'type': '[str]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterface, self).__init__(**kwargs)
self.etag = None
self.virtual_machine = None
self.network_security_group = kwargs.get('network_security_group', None)
self.private_endpoint = None
self.ip_configurations = kwargs.get('ip_configurations', None)
self.tap_configurations = None
self.dns_settings = kwargs.get('dns_settings', None)
self.mac_address = None
self.primary = None
self.enable_accelerated_networking = kwargs.get('enable_accelerated_networking', None)
self.enable_ip_forwarding = kwargs.get('enable_ip_forwarding', None)
self.hosted_workloads = None
self.resource_guid = None
self.provisioning_state = None
class NetworkInterfaceAssociation(msrest.serialization.Model):
"""Network interface and its custom security rules.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Network interface ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules: list[~azure.mgmt.network.v2019_09_01.models.SecurityRule]
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceAssociation, self).__init__(**kwargs)
self.id = None
self.security_rules = kwargs.get('security_rules', None)
class NetworkInterfaceDnsSettings(msrest.serialization.Model):
"""DNS settings of a network interface.
Variables are only populated by the server, and will be ignored when sending a request.
:param dns_servers: List of DNS server IP addresses. Use 'AzureProvidedDNS' to switch to
Azure-provided DNS resolution. The 'AzureProvidedDNS' value cannot be combined with other IPs;
it must be the only value in the dnsServers collection.
:type dns_servers: list[str]
:ivar applied_dns_servers: If the VM that uses this NIC is part of an Availability Set, then
this list will have the union of all DNS servers from all NICs that are part of the
Availability Set. This property is what is configured on each of those VMs.
:vartype applied_dns_servers: list[str]
:param internal_dns_name_label: Relative DNS name for this NIC used for internal communications
between VMs in the same virtual network.
:type internal_dns_name_label: str
:ivar internal_fqdn: Fully qualified DNS name supporting internal communications between VMs in
the same virtual network.
:vartype internal_fqdn: str
:ivar internal_domain_name_suffix: Even if internalDnsNameLabel is not specified, a DNS entry
is created for the primary NIC of the VM. This DNS name can be constructed by concatenating the
VM name with the value of internalDomainNameSuffix.
:vartype internal_domain_name_suffix: str
"""
_validation = {
'applied_dns_servers': {'readonly': True},
'internal_fqdn': {'readonly': True},
'internal_domain_name_suffix': {'readonly': True},
}
_attribute_map = {
'dns_servers': {'key': 'dnsServers', 'type': '[str]'},
'applied_dns_servers': {'key': 'appliedDnsServers', 'type': '[str]'},
'internal_dns_name_label': {'key': 'internalDnsNameLabel', 'type': 'str'},
'internal_fqdn': {'key': 'internalFqdn', 'type': 'str'},
'internal_domain_name_suffix': {'key': 'internalDomainNameSuffix', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceDnsSettings, self).__init__(**kwargs)
self.dns_servers = kwargs.get('dns_servers', None)
self.applied_dns_servers = None
self.internal_dns_name_label = kwargs.get('internal_dns_name_label', None)
self.internal_fqdn = None
self.internal_domain_name_suffix = None
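# Illustrative sketch (not part of the generated client): DNS settings for a NIC with two
# custom servers; per the docstring above, 'AzureProvidedDNS' could be used instead, but
# only as the sole entry. Server addresses and the DNS label are hypothetical placeholders.
def _example_nic_dns_settings():
    return NetworkInterfaceDnsSettings(
        dns_servers=["10.0.0.4", "10.0.0.5"],
        internal_dns_name_label="myvm-nic",
    )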
class NetworkInterfaceIPConfiguration(SubResource):
"""IPConfiguration in a network interface.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_network_taps: The reference to Virtual Network Taps.
:type virtual_network_taps: list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkTap]
:param application_gateway_backend_address_pools: The reference of
ApplicationGatewayBackendAddressPool resource.
:type application_gateway_backend_address_pools:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationGatewayBackendAddressPool]
:param load_balancer_backend_address_pools: The reference of LoadBalancerBackendAddressPool
resource.
:type load_balancer_backend_address_pools:
list[~azure.mgmt.network.v2019_09_01.models.BackendAddressPool]
:param load_balancer_inbound_nat_rules: A list of references of LoadBalancerInboundNatRules.
:type load_balancer_inbound_nat_rules:
list[~azure.mgmt.network.v2019_09_01.models.InboundNatRule]
:param private_ip_address: Private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
:param private_ip_address_version: Whether the specific IP configuration is IPv4 or IPv6.
Default is IPv4. Possible values include: "IPv4", "IPv6".
:type private_ip_address_version: str or ~azure.mgmt.network.v2019_09_01.models.IPVersion
:param subnet: Subnet bound to the IP configuration.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.Subnet
:param primary: Whether this is a primary customer address on the network interface.
:type primary: bool
:param public_ip_address: Public IP address bound to the IP configuration.
:type public_ip_address: ~azure.mgmt.network.v2019_09_01.models.PublicIPAddress
:param application_security_groups: Application security groups in which the IP configuration
is included.
:type application_security_groups:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationSecurityGroup]
:ivar provisioning_state: The provisioning state of the network interface IP configuration.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:ivar private_link_connection_properties: PrivateLinkConnection properties for the network
interface.
:vartype private_link_connection_properties:
~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfigurationPrivateLinkConnectionProperties
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'private_link_connection_properties': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_network_taps': {'key': 'properties.virtualNetworkTaps', 'type': '[VirtualNetworkTap]'},
'application_gateway_backend_address_pools': {'key': 'properties.applicationGatewayBackendAddressPools', 'type': '[ApplicationGatewayBackendAddressPool]'},
'load_balancer_backend_address_pools': {'key': 'properties.loadBalancerBackendAddressPools', 'type': '[BackendAddressPool]'},
'load_balancer_inbound_nat_rules': {'key': 'properties.loadBalancerInboundNatRules', 'type': '[InboundNatRule]'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'private_ip_address_version': {'key': 'properties.privateIPAddressVersion', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'primary': {'key': 'properties.primary', 'type': 'bool'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
'application_security_groups': {'key': 'properties.applicationSecurityGroups', 'type': '[ApplicationSecurityGroup]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_link_connection_properties': {'key': 'properties.privateLinkConnectionProperties', 'type': 'NetworkInterfaceIPConfigurationPrivateLinkConnectionProperties'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.virtual_network_taps = kwargs.get('virtual_network_taps', None)
self.application_gateway_backend_address_pools = kwargs.get('application_gateway_backend_address_pools', None)
self.load_balancer_backend_address_pools = kwargs.get('load_balancer_backend_address_pools', None)
self.load_balancer_inbound_nat_rules = kwargs.get('load_balancer_inbound_nat_rules', None)
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.private_ip_address_version = kwargs.get('private_ip_address_version', None)
self.subnet = kwargs.get('subnet', None)
self.primary = kwargs.get('primary', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.application_security_groups = kwargs.get('application_security_groups', None)
self.provisioning_state = None
self.private_link_connection_properties = None
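# Illustrative sketch (not part of the generated client): a dynamic IPv4 IP configuration
# bound to a subnet. Subnet is assumed to be defined elsewhere in this module and, like
# other sub-resources, can be referenced by ID alone; the ID string is a hypothetical
# placeholder.
def _example_nic_ip_configuration():
    return NetworkInterfaceIPConfiguration(
        name="ipconfig1",
        subnet=Subnet(id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"),
        private_ip_allocation_method="Dynamic",
        private_ip_address_version="IPv4",
        primary=True,
    )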
class NetworkInterfaceIPConfigurationListResult(msrest.serialization.Model):
"""Response for list ip configurations API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of ip configurations.
:type value: list[~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfiguration]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkInterfaceIPConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceIPConfigurationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkInterfaceIPConfigurationPrivateLinkConnectionProperties(msrest.serialization.Model):
"""PrivateLinkConnection properties for the network interface.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar group_id: The group ID for current private link connection.
:vartype group_id: str
:ivar required_member_name: The required member name for current private link connection.
:vartype required_member_name: str
:ivar fqdns: List of FQDNs for current private link connection.
:vartype fqdns: list[str]
"""
_validation = {
'group_id': {'readonly': True},
'required_member_name': {'readonly': True},
'fqdns': {'readonly': True},
}
_attribute_map = {
'group_id': {'key': 'groupId', 'type': 'str'},
'required_member_name': {'key': 'requiredMemberName', 'type': 'str'},
'fqdns': {'key': 'fqdns', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceIPConfigurationPrivateLinkConnectionProperties, self).__init__(**kwargs)
self.group_id = None
self.required_member_name = None
self.fqdns = None
class NetworkInterfaceListResult(msrest.serialization.Model):
"""Response for the ListNetworkInterface API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of network interfaces in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.NetworkInterface]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkInterface]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkInterfaceLoadBalancerListResult(msrest.serialization.Model):
"""Response for list ip configurations API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of load balancers.
:type value: list[~azure.mgmt.network.v2019_09_01.models.LoadBalancer]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[LoadBalancer]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceLoadBalancerListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkInterfaceTapConfiguration(SubResource):
"""Tap configuration in a Network Interface.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Sub Resource type.
:vartype type: str
:param virtual_network_tap: The reference of the Virtual Network Tap resource.
:type virtual_network_tap: ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkTap
:ivar provisioning_state: The provisioning state of the network interface tap configuration
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'virtual_network_tap': {'key': 'properties.virtualNetworkTap', 'type': 'VirtualNetworkTap'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceTapConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.virtual_network_tap = kwargs.get('virtual_network_tap', None)
self.provisioning_state = None
class NetworkInterfaceTapConfigurationListResult(msrest.serialization.Model):
"""Response for list tap configurations API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of tap configurations.
:type value: list[~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceTapConfiguration]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkInterfaceTapConfiguration]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkInterfaceTapConfigurationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class NetworkProfile(Resource):
"""Network profile resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar container_network_interfaces: List of child container network interfaces.
:vartype container_network_interfaces:
list[~azure.mgmt.network.v2019_09_01.models.ContainerNetworkInterface]
:param container_network_interface_configurations: List of child container network interface
configurations.
:type container_network_interface_configurations:
list[~azure.mgmt.network.v2019_09_01.models.ContainerNetworkInterfaceConfiguration]
:ivar resource_guid: The resource GUID property of the network profile resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the network profile resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'container_network_interfaces': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'container_network_interfaces': {'key': 'properties.containerNetworkInterfaces', 'type': '[ContainerNetworkInterface]'},
'container_network_interface_configurations': {'key': 'properties.containerNetworkInterfaceConfigurations', 'type': '[ContainerNetworkInterfaceConfiguration]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkProfile, self).__init__(**kwargs)
self.etag = None
self.container_network_interfaces = None
self.container_network_interface_configurations = kwargs.get('container_network_interface_configurations', None)
self.resource_guid = None
self.provisioning_state = None
class NetworkProfileListResult(msrest.serialization.Model):
"""Response for ListNetworkProfiles API service call.
:param value: A list of network profiles that exist in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.NetworkProfile]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkProfile]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkProfileListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class NetworkRuleCondition(FirewallPolicyRuleCondition):
"""Rule condition of type network.
All required parameters must be populated in order to send to Azure.
:param name: Name of the rule condition.
:type name: str
:param description: Description of the rule condition.
:type description: str
:param rule_condition_type: Required. Rule Condition Type.Constant filled by server. Possible
values include: "ApplicationRuleCondition", "NetworkRuleCondition".
:type rule_condition_type: str or
~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleConditionType
:param ip_protocols: Array of FirewallPolicyRuleConditionNetworkProtocols.
:type ip_protocols: list[str or
~azure.mgmt.network.v2019_09_01.models.FirewallPolicyRuleConditionNetworkProtocol]
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param destination_addresses: List of destination IP addresses or Service Tags.
:type destination_addresses: list[str]
:param destination_ports: List of destination ports.
:type destination_ports: list[str]
"""
_validation = {
'rule_condition_type': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'rule_condition_type': {'key': 'ruleConditionType', 'type': 'str'},
'ip_protocols': {'key': 'ipProtocols', 'type': '[str]'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'destination_ports': {'key': 'destinationPorts', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(NetworkRuleCondition, self).__init__(**kwargs)
self.rule_condition_type = 'NetworkRuleCondition' # type: str
self.ip_protocols = kwargs.get('ip_protocols', None)
self.source_addresses = kwargs.get('source_addresses', None)
self.destination_addresses = kwargs.get('destination_addresses', None)
self.destination_ports = kwargs.get('destination_ports', None)
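# Illustrative sketch (not part of the generated client): a firewall policy network rule
# condition allowing DNS traffic to a single address. All concrete values are hypothetical
# placeholders; rule_condition_type is fixed to 'NetworkRuleCondition' by the constructor
# above, so it is not passed in.
def _example_network_rule_condition():
    return NetworkRuleCondition(
        name="allow-dns",
        ip_protocols=["UDP"],
        source_addresses=["10.0.0.0/24"],
        destination_addresses=["8.8.8.8"],
        destination_ports=["53"],
    )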
class NetworkSecurityGroup(Resource):
"""NetworkSecurityGroup resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param security_rules: A collection of security rules of the network security group.
:type security_rules: list[~azure.mgmt.network.v2019_09_01.models.SecurityRule]
:ivar default_security_rules: The default security rules of network security group.
:vartype default_security_rules: list[~azure.mgmt.network.v2019_09_01.models.SecurityRule]
:ivar network_interfaces: A collection of references to network interfaces.
:vartype network_interfaces: list[~azure.mgmt.network.v2019_09_01.models.NetworkInterface]
:ivar subnets: A collection of references to subnets.
:vartype subnets: list[~azure.mgmt.network.v2019_09_01.models.Subnet]
:ivar resource_guid: The resource GUID property of the network security group resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the network security group resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'default_security_rules': {'readonly': True},
'network_interfaces': {'readonly': True},
'subnets': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'security_rules': {'key': 'properties.securityRules', 'type': '[SecurityRule]'},
'default_security_rules': {'key': 'properties.defaultSecurityRules', 'type': '[SecurityRule]'},
'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityGroup, self).__init__(**kwargs)
self.etag = None
self.security_rules = kwargs.get('security_rules', None)
self.default_security_rules = None
self.network_interfaces = None
self.subnets = None
self.resource_guid = None
self.provisioning_state = None
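# Illustrative sketch (not part of the generated client): an NSG with one inbound rule. The
# SecurityRule model is assumed to be defined elsewhere in this module; the field names used
# here (protocol, access, direction, priority, address/port prefixes) follow that model as an
# assumption, and all concrete values are placeholders.
def _example_network_security_group():
    return NetworkSecurityGroup(
        location="westus2",
        security_rules=[
            SecurityRule(
                name="allow-https-inbound",
                protocol="Tcp",
                access="Allow",
                direction="Inbound",
                priority=100,
                source_address_prefix="*",
                source_port_range="*",
                destination_address_prefix="*",
                destination_port_range="443",
            ),
        ],
    )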
class NetworkSecurityGroupListResult(msrest.serialization.Model):
"""Response for ListNetworkSecurityGroups API service call.
:param value: A list of NetworkSecurityGroup resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.NetworkSecurityGroup]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkSecurityGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityGroupListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class NetworkSecurityGroupResult(msrest.serialization.Model):
"""Network configuration diagnostic result corresponded provided traffic query.
Variables are only populated by the server, and will be ignored when sending a request.
:param security_rule_access_result: The network traffic is allowed or denied. Possible values
include: "Allow", "Deny".
:type security_rule_access_result: str or
~azure.mgmt.network.v2019_09_01.models.SecurityRuleAccess
:ivar evaluated_network_security_groups: List of network security group diagnostic results.
:vartype evaluated_network_security_groups:
list[~azure.mgmt.network.v2019_09_01.models.EvaluatedNetworkSecurityGroup]
"""
_validation = {
'evaluated_network_security_groups': {'readonly': True},
}
_attribute_map = {
'security_rule_access_result': {'key': 'securityRuleAccessResult', 'type': 'str'},
'evaluated_network_security_groups': {'key': 'evaluatedNetworkSecurityGroups', 'type': '[EvaluatedNetworkSecurityGroup]'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityGroupResult, self).__init__(**kwargs)
self.security_rule_access_result = kwargs.get('security_rule_access_result', None)
self.evaluated_network_security_groups = None
class NetworkSecurityRulesEvaluationResult(msrest.serialization.Model):
"""Network security rules evaluation result.
:param name: Name of the network security rule.
:type name: str
:param protocol_matched: Value indicating whether protocol is matched.
:type protocol_matched: bool
:param source_matched: Value indicating whether source is matched.
:type source_matched: bool
:param source_port_matched: Value indicating whether source port is matched.
:type source_port_matched: bool
:param destination_matched: Value indicating whether destination is matched.
:type destination_matched: bool
:param destination_port_matched: Value indicating whether destination port is matched.
:type destination_port_matched: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'protocol_matched': {'key': 'protocolMatched', 'type': 'bool'},
'source_matched': {'key': 'sourceMatched', 'type': 'bool'},
'source_port_matched': {'key': 'sourcePortMatched', 'type': 'bool'},
'destination_matched': {'key': 'destinationMatched', 'type': 'bool'},
'destination_port_matched': {'key': 'destinationPortMatched', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(NetworkSecurityRulesEvaluationResult, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.protocol_matched = kwargs.get('protocol_matched', None)
self.source_matched = kwargs.get('source_matched', None)
self.source_port_matched = kwargs.get('source_port_matched', None)
self.destination_matched = kwargs.get('destination_matched', None)
self.destination_port_matched = kwargs.get('destination_port_matched', None)
class NetworkWatcher(Resource):
"""Network watcher in a resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the network watcher resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NetworkWatcher, self).__init__(**kwargs)
self.etag = None
self.provisioning_state = None
class NetworkWatcherListResult(msrest.serialization.Model):
"""Response for ListNetworkWatchers API service call.
:param value: List of network watcher resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.NetworkWatcher]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[NetworkWatcher]'},
}
def __init__(
self,
**kwargs
):
super(NetworkWatcherListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class NextHopParameters(msrest.serialization.Model):
"""Parameters that define the source and destination endpoint.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The resource identifier of the target resource against
which the action is to be performed.
:type target_resource_id: str
:param source_ip_address: Required. The source IP address.
:type source_ip_address: str
:param destination_ip_address: Required. The destination IP address.
:type destination_ip_address: str
:param target_nic_resource_id: The NIC ID. (If the VM has multiple NICs and IP forwarding is
enabled on any of the NICs, then this parameter must be specified. Otherwise optional).
:type target_nic_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
'source_ip_address': {'required': True},
'destination_ip_address': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'source_ip_address': {'key': 'sourceIPAddress', 'type': 'str'},
'destination_ip_address': {'key': 'destinationIPAddress', 'type': 'str'},
'target_nic_resource_id': {'key': 'targetNicResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NextHopParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.source_ip_address = kwargs['source_ip_address']
self.destination_ip_address = kwargs['destination_ip_address']
self.target_nic_resource_id = kwargs.get('target_nic_resource_id', None)
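# Illustrative sketch (not part of the generated client): parameters for a Network Watcher
# next-hop query from one VM address to a destination. The resource ID and IP addresses are
# hypothetical placeholders.
def _example_next_hop_parameters():
    return NextHopParameters(
        target_resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>",
        source_ip_address="10.0.0.4",
        destination_ip_address="10.0.1.10",
    )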
class NextHopResult(msrest.serialization.Model):
"""The information about next hop from the specified VM.
:param next_hop_type: Next hop type. Possible values include: "Internet", "VirtualAppliance",
"VirtualNetworkGateway", "VnetLocal", "HyperNetGateway", "None".
:type next_hop_type: str or ~azure.mgmt.network.v2019_09_01.models.NextHopType
:param next_hop_ip_address: Next hop IP Address.
:type next_hop_ip_address: str
:param route_table_id: The resource identifier for the route table associated with the route
being returned. If the route being returned does not correspond to any user-created routes, then
this field will be the string 'System Route'.
:type route_table_id: str
"""
_attribute_map = {
'next_hop_type': {'key': 'nextHopType', 'type': 'str'},
'next_hop_ip_address': {'key': 'nextHopIpAddress', 'type': 'str'},
'route_table_id': {'key': 'routeTableId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(NextHopResult, self).__init__(**kwargs)
self.next_hop_type = kwargs.get('next_hop_type', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
self.route_table_id = kwargs.get('route_table_id', None)
class Operation(msrest.serialization.Model):
"""Network REST API operation definition.
:param name: Operation name: {provider}/{resource}/{operation}.
:type name: str
:param display: Display metadata associated with the operation.
:type display: ~azure.mgmt.network.v2019_09_01.models.OperationDisplay
:param origin: Origin of the operation.
:type origin: str
:param service_specification: Specification of the service.
:type service_specification:
~azure.mgmt.network.v2019_09_01.models.OperationPropertiesFormatServiceSpecification
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
'origin': {'key': 'origin', 'type': 'str'},
'service_specification': {'key': 'properties.serviceSpecification', 'type': 'OperationPropertiesFormatServiceSpecification'},
}
def __init__(
self,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.display = kwargs.get('display', None)
self.origin = kwargs.get('origin', None)
self.service_specification = kwargs.get('service_specification', None)
class OperationDisplay(msrest.serialization.Model):
"""Display metadata associated with the operation.
:param provider: Service provider: Microsoft Network.
:type provider: str
:param resource: Resource on which the operation is performed.
:type resource: str
:param operation: Type of the operation: get, read, delete, etc.
:type operation: str
:param description: Description of the operation.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationDisplay, self).__init__(**kwargs)
self.provider = kwargs.get('provider', None)
self.resource = kwargs.get('resource', None)
self.operation = kwargs.get('operation', None)
self.description = kwargs.get('description', None)
class OperationListResult(msrest.serialization.Model):
"""Result of the request to list Network operations. It contains a list of operations and a URL link to get the next set of results.
:param value: List of Network operations supported by the Network resource provider.
:type value: list[~azure.mgmt.network.v2019_09_01.models.Operation]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Operation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class OperationPropertiesFormatServiceSpecification(msrest.serialization.Model):
"""Specification of the service.
:param metric_specifications: Metric specifications of the operation.
:type metric_specifications: list[~azure.mgmt.network.v2019_09_01.models.MetricSpecification]
:param log_specifications: Log specifications of the operation.
:type log_specifications: list[~azure.mgmt.network.v2019_09_01.models.LogSpecification]
"""
_attribute_map = {
'metric_specifications': {'key': 'metricSpecifications', 'type': '[MetricSpecification]'},
'log_specifications': {'key': 'logSpecifications', 'type': '[LogSpecification]'},
}
def __init__(
self,
**kwargs
):
super(OperationPropertiesFormatServiceSpecification, self).__init__(**kwargs)
self.metric_specifications = kwargs.get('metric_specifications', None)
self.log_specifications = kwargs.get('log_specifications', None)
class OutboundRule(SubResource):
"""Outbound rule of the load balancer.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of outbound rules used by
the load balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param allocated_outbound_ports: The number of outbound ports to be used for NAT.
:type allocated_outbound_ports: int
:param frontend_ip_configurations: The Frontend IP addresses of the load balancer.
:type frontend_ip_configurations: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param backend_address_pool: A reference to a pool of DIPs. Outbound traffic is randomly load
balanced across IPs in the backend pool.
:type backend_address_pool: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the outbound rule resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param protocol: The protocol for the outbound rule in load balancer. Possible values include:
"Tcp", "Udp", "All".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.LoadBalancerOutboundRuleProtocol
:param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected
connection termination. This element is only used when the protocol is set to TCP.
:type enable_tcp_reset: bool
:param idle_timeout_in_minutes: The timeout for the TCP idle connection.
:type idle_timeout_in_minutes: int
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'allocated_outbound_ports': {'key': 'properties.allocatedOutboundPorts', 'type': 'int'},
'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[SubResource]'},
'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(OutboundRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.allocated_outbound_ports = kwargs.get('allocated_outbound_ports', None)
self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None)
self.backend_address_pool = kwargs.get('backend_address_pool', None)
self.provisioning_state = None
self.protocol = kwargs.get('protocol', None)
self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
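# Illustrative usage sketch, not part of the generated client: wiring an OutboundRule to
# hypothetical frontend IP configurations and a backend address pool by resource ID. The IDs
# below are placeholders, not real Azure resources.
def _example_outbound_rule():
    frontend = SubResource(id='/subscriptions/<sub>/.../frontendIPConfigurations/fe1')  # placeholder ID
    backend_pool = SubResource(id='/subscriptions/<sub>/.../backendAddressPools/pool1')  # placeholder ID
    return OutboundRule(
        name='outbound-rule-1',
        protocol='All',
        allocated_outbound_ports=1024,
        idle_timeout_in_minutes=4,
        enable_tcp_reset=True,
        frontend_ip_configurations=[frontend],
        backend_address_pool=backend_pool,
    )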
class OwaspCrsExclusionEntry(msrest.serialization.Model):
"""Allow to exclude some variable satisfy the condition for the WAF check.
All required parameters must be populated in order to send to Azure.
:param match_variable: Required. The variable to be excluded. Possible values include:
"RequestHeaderNames", "RequestCookieNames", "RequestArgNames".
:type match_variable: str or
~azure.mgmt.network.v2019_09_01.models.OwaspCrsExclusionEntryMatchVariable
:param selector_match_operator: Required. When matchVariable is a collection, operate on the
selector to specify which elements in the collection this exclusion applies to. Possible values
include: "Equals", "Contains", "StartsWith", "EndsWith", "EqualsAny".
:type selector_match_operator: str or
~azure.mgmt.network.v2019_09_01.models.OwaspCrsExclusionEntrySelectorMatchOperator
    :param selector: Required. When matchVariable is a collection, the selector specifies which
     elements in the collection this exclusion applies to.
:type selector: str
"""
_validation = {
'match_variable': {'required': True},
'selector_match_operator': {'required': True},
'selector': {'required': True},
}
_attribute_map = {
'match_variable': {'key': 'matchVariable', 'type': 'str'},
'selector_match_operator': {'key': 'selectorMatchOperator', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OwaspCrsExclusionEntry, self).__init__(**kwargs)
self.match_variable = kwargs['match_variable']
self.selector_match_operator = kwargs['selector_match_operator']
self.selector = kwargs['selector']
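# Illustrative usage sketch, not part of the generated client: every OwaspCrsExclusionEntry
# parameter is required, which is why __init__ above indexes kwargs directly; omitting one
# raises KeyError at construction time. The header name is a hypothetical example.
def _example_owasp_crs_exclusion_entry():
    return OwaspCrsExclusionEntry(
        match_variable='RequestHeaderNames',
        selector_match_operator='Equals',
        selector='x-example-header',  # hypothetical request header excluded from WAF evaluation
    )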
class P2SConnectionConfiguration(SubResource):
"""P2SConnectionConfiguration Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param vpn_client_address_pool: The reference of the address space resource which represents
Address space for P2S VpnClient.
:type vpn_client_address_pool: ~azure.mgmt.network.v2019_09_01.models.AddressSpace
:ivar provisioning_state: The provisioning state of the P2SConnectionConfiguration resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'vpn_client_address_pool': {'key': 'properties.vpnClientAddressPool', 'type': 'AddressSpace'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SConnectionConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.vpn_client_address_pool = kwargs.get('vpn_client_address_pool', None)
self.provisioning_state = None
class P2SVpnConnectionHealth(msrest.serialization.Model):
"""P2S Vpn connection detailed health written to sas url.
:param sas_url: Returned sas url of the blob to which the p2s vpn connection detailed health
will be written.
:type sas_url: str
"""
_attribute_map = {
'sas_url': {'key': 'sasUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnConnectionHealth, self).__init__(**kwargs)
self.sas_url = kwargs.get('sas_url', None)
class P2SVpnConnectionHealthRequest(msrest.serialization.Model):
"""List of P2S Vpn connection health request.
    :param vpn_user_names_filter: The list of P2S VPN user names whose detailed connection health
     should be retrieved.
:type vpn_user_names_filter: list[str]
:param output_blob_sas_url: The sas-url to download the P2S Vpn connection health detail.
:type output_blob_sas_url: str
"""
_attribute_map = {
'vpn_user_names_filter': {'key': 'vpnUserNamesFilter', 'type': '[str]'},
'output_blob_sas_url': {'key': 'outputBlobSasUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnConnectionHealthRequest, self).__init__(**kwargs)
self.vpn_user_names_filter = kwargs.get('vpn_user_names_filter', None)
self.output_blob_sas_url = kwargs.get('output_blob_sas_url', None)
class P2SVpnGateway(Resource):
"""P2SVpnGateway Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_hub: The VirtualHub to which the gateway belongs.
:type virtual_hub: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param p2_s_connection_configurations: List of all p2s connection configurations of the
gateway.
:type p2_s_connection_configurations:
list[~azure.mgmt.network.v2019_09_01.models.P2SConnectionConfiguration]
:ivar provisioning_state: The provisioning state of the P2S VPN gateway resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param vpn_gateway_scale_unit: The scale unit for this p2s vpn gateway.
:type vpn_gateway_scale_unit: int
    :param vpn_server_configuration: The VpnServerConfiguration to which the p2sVpnGateway is
     attached.
:type vpn_server_configuration: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar vpn_client_connection_health: All P2S VPN clients' connection health status.
:vartype vpn_client_connection_health:
~azure.mgmt.network.v2019_09_01.models.VpnClientConnectionHealth
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'vpn_client_connection_health': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_hub': {'key': 'properties.virtualHub', 'type': 'SubResource'},
'p2_s_connection_configurations': {'key': 'properties.p2SConnectionConfigurations', 'type': '[P2SConnectionConfiguration]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'vpn_gateway_scale_unit': {'key': 'properties.vpnGatewayScaleUnit', 'type': 'int'},
'vpn_server_configuration': {'key': 'properties.vpnServerConfiguration', 'type': 'SubResource'},
'vpn_client_connection_health': {'key': 'properties.vpnClientConnectionHealth', 'type': 'VpnClientConnectionHealth'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnGateway, self).__init__(**kwargs)
self.etag = None
self.virtual_hub = kwargs.get('virtual_hub', None)
self.p2_s_connection_configurations = kwargs.get('p2_s_connection_configurations', None)
self.provisioning_state = None
self.vpn_gateway_scale_unit = kwargs.get('vpn_gateway_scale_unit', None)
self.vpn_server_configuration = kwargs.get('vpn_server_configuration', None)
self.vpn_client_connection_health = None
class P2SVpnProfileParameters(msrest.serialization.Model):
"""Vpn Client Parameters for package generation.
:param authentication_method: VPN client authentication method. Possible values include:
"EAPTLS", "EAPMSCHAPv2".
:type authentication_method: str or ~azure.mgmt.network.v2019_09_01.models.AuthenticationMethod
"""
_attribute_map = {
'authentication_method': {'key': 'authenticationMethod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(P2SVpnProfileParameters, self).__init__(**kwargs)
self.authentication_method = kwargs.get('authentication_method', None)
class PacketCapture(msrest.serialization.Model):
"""Parameters that define the create packet capture operation.
All required parameters must be populated in order to send to Azure.
:param target: Required. The ID of the targeted resource, only VM is currently supported.
:type target: str
    :param bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Required. Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_09_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_09_01.models.PacketCaptureFilter]
"""
_validation = {
'target': {'required': True},
'storage_location': {'required': True},
}
_attribute_map = {
'target': {'key': 'properties.target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'properties.bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'properties.totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'properties.timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'properties.storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'properties.filters', 'type': '[PacketCaptureFilter]'},
}
def __init__(
self,
**kwargs
):
super(PacketCapture, self).__init__(**kwargs)
self.target = kwargs['target']
self.bytes_to_capture_per_packet = kwargs.get('bytes_to_capture_per_packet', 0)
self.total_bytes_per_session = kwargs.get('total_bytes_per_session', 1073741824)
self.time_limit_in_seconds = kwargs.get('time_limit_in_seconds', 18000)
self.storage_location = kwargs['storage_location']
self.filters = kwargs.get('filters', None)
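# Illustrative usage sketch, not part of the generated client: a minimal PacketCapture request.
# Only 'target' and 'storage_location' are required; the per-packet, per-session and time limits
# fall back to the defaults assigned in __init__ above. The resource IDs are placeholders.
def _example_packet_capture():
    storage = PacketCaptureStorageLocation(
        storage_id='/subscriptions/<sub>/.../storageAccounts/captures',  # placeholder storage account ID
    )
    return PacketCapture(
        target='/subscriptions/<sub>/.../virtualMachines/vm1',  # placeholder VM ID; only VMs are supported
        storage_location=storage,
    )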
class PacketCaptureFilter(msrest.serialization.Model):
"""Filter that is applied to packet capture request. Multiple filters can be applied.
:param protocol: Protocol to be filtered on. Possible values include: "TCP", "UDP", "Any".
Default value: "Any".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.PcProtocol
    :param local_ip_address: Local IP address to be filtered on. Notation: "127.0.0.1" for single
     address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5;" for multiple entries.
     Multiple ranges not currently supported. Mixing ranges with multiple entries not currently
     supported. Default = null.
:type local_ip_address: str
    :param remote_ip_address: Remote IP address to be filtered on. Notation: "127.0.0.1" for single
address entry. "127.0.0.1-127.0.0.255" for range. "127.0.0.1;127.0.0.5;" for multiple entries.
Multiple ranges not currently supported. Mixing ranges with multiple entries not currently
supported. Default = null.
:type remote_ip_address: str
    :param local_port: Local port to be filtered on. Notation: "80" for single port entry. "80-85"
for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing
ranges with multiple entries not currently supported. Default = null.
:type local_port: str
    :param remote_port: Remote port to be filtered on. Notation: "80" for single port entry. "80-85"
for range. "80;443;" for multiple entries. Multiple ranges not currently supported. Mixing
ranges with multiple entries not currently supported. Default = null.
:type remote_port: str
"""
_attribute_map = {
'protocol': {'key': 'protocol', 'type': 'str'},
'local_ip_address': {'key': 'localIPAddress', 'type': 'str'},
'remote_ip_address': {'key': 'remoteIPAddress', 'type': 'str'},
'local_port': {'key': 'localPort', 'type': 'str'},
'remote_port': {'key': 'remotePort', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureFilter, self).__init__(**kwargs)
self.protocol = kwargs.get('protocol', "Any")
self.local_ip_address = kwargs.get('local_ip_address', None)
self.remote_ip_address = kwargs.get('remote_ip_address', None)
self.local_port = kwargs.get('local_port', None)
self.remote_port = kwargs.get('remote_port', None)
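# Illustrative usage sketch, not part of the generated client: a filter using the notations
# documented above -- a single address, an address range, and a ';'-separated port list. The
# addresses and ports are arbitrary examples.
def _example_packet_capture_filter():
    return PacketCaptureFilter(
        protocol='TCP',
        local_ip_address='10.0.0.4',              # single address entry
        remote_ip_address='10.0.1.1-10.0.1.255',  # address range
        remote_port='80;443;',                    # multiple port entries
    )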
class PacketCaptureListResult(msrest.serialization.Model):
"""List of packet capture sessions.
:param value: Information about packet capture sessions.
:type value: list[~azure.mgmt.network.v2019_09_01.models.PacketCaptureResult]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PacketCaptureResult]'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class PacketCaptureParameters(msrest.serialization.Model):
"""Parameters that define the create packet capture operation.
All required parameters must be populated in order to send to Azure.
:param target: Required. The ID of the targeted resource, only VM is currently supported.
:type target: str
    :param bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Required. Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_09_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_09_01.models.PacketCaptureFilter]
"""
_validation = {
'target': {'required': True},
'storage_location': {'required': True},
}
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'filters', 'type': '[PacketCaptureFilter]'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureParameters, self).__init__(**kwargs)
self.target = kwargs['target']
self.bytes_to_capture_per_packet = kwargs.get('bytes_to_capture_per_packet', 0)
self.total_bytes_per_session = kwargs.get('total_bytes_per_session', 1073741824)
self.time_limit_in_seconds = kwargs.get('time_limit_in_seconds', 18000)
self.storage_location = kwargs['storage_location']
self.filters = kwargs.get('filters', None)
class PacketCaptureQueryStatusResult(msrest.serialization.Model):
"""Status of packet capture session.
:param name: The name of the packet capture resource.
:type name: str
:param id: The ID of the packet capture resource.
:type id: str
:param capture_start_time: The start time of the packet capture session.
:type capture_start_time: ~datetime.datetime
:param packet_capture_status: The status of the packet capture session. Possible values
include: "NotStarted", "Running", "Stopped", "Error", "Unknown".
:type packet_capture_status: str or ~azure.mgmt.network.v2019_09_01.models.PcStatus
:param stop_reason: The reason the current packet capture session was stopped.
:type stop_reason: str
:param packet_capture_error: List of errors of packet capture session.
:type packet_capture_error: list[str or ~azure.mgmt.network.v2019_09_01.models.PcError]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'capture_start_time': {'key': 'captureStartTime', 'type': 'iso-8601'},
'packet_capture_status': {'key': 'packetCaptureStatus', 'type': 'str'},
'stop_reason': {'key': 'stopReason', 'type': 'str'},
'packet_capture_error': {'key': 'packetCaptureError', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureQueryStatusResult, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.capture_start_time = kwargs.get('capture_start_time', None)
self.packet_capture_status = kwargs.get('packet_capture_status', None)
self.stop_reason = kwargs.get('stop_reason', None)
self.packet_capture_error = kwargs.get('packet_capture_error', None)
class PacketCaptureResult(msrest.serialization.Model):
"""Information about packet capture session.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Name of the packet capture session.
:vartype name: str
:ivar id: ID of the packet capture operation.
:vartype id: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param target: The ID of the targeted resource, only VM is currently supported.
:type target: str
    :param bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_09_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_09_01.models.PacketCaptureFilter]
:ivar provisioning_state: The provisioning state of the packet capture session. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'target': {'key': 'properties.target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'properties.bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'properties.totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'properties.timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'properties.storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'properties.filters', 'type': '[PacketCaptureFilter]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureResult, self).__init__(**kwargs)
self.name = None
self.id = None
self.etag = None
self.target = kwargs.get('target', None)
self.bytes_to_capture_per_packet = kwargs.get('bytes_to_capture_per_packet', 0)
self.total_bytes_per_session = kwargs.get('total_bytes_per_session', 1073741824)
self.time_limit_in_seconds = kwargs.get('time_limit_in_seconds', 18000)
self.storage_location = kwargs.get('storage_location', None)
self.filters = kwargs.get('filters', None)
self.provisioning_state = None
class PacketCaptureResultProperties(PacketCaptureParameters):
"""Describes the properties of a packet capture session.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param target: Required. The ID of the targeted resource, only VM is currently supported.
:type target: str
    :param bytes_to_capture_per_packet: Number of bytes captured per packet; the remaining bytes
are truncated.
:type bytes_to_capture_per_packet: int
:param total_bytes_per_session: Maximum size of the capture output.
:type total_bytes_per_session: int
:param time_limit_in_seconds: Maximum duration of the capture session in seconds.
:type time_limit_in_seconds: int
:param storage_location: Required. Describes the storage location for a packet capture session.
:type storage_location: ~azure.mgmt.network.v2019_09_01.models.PacketCaptureStorageLocation
:param filters: A list of packet capture filters.
:type filters: list[~azure.mgmt.network.v2019_09_01.models.PacketCaptureFilter]
:ivar provisioning_state: The provisioning state of the packet capture session. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'target': {'required': True},
'storage_location': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'target': {'key': 'target', 'type': 'str'},
'bytes_to_capture_per_packet': {'key': 'bytesToCapturePerPacket', 'type': 'int'},
'total_bytes_per_session': {'key': 'totalBytesPerSession', 'type': 'int'},
'time_limit_in_seconds': {'key': 'timeLimitInSeconds', 'type': 'int'},
'storage_location': {'key': 'storageLocation', 'type': 'PacketCaptureStorageLocation'},
'filters': {'key': 'filters', 'type': '[PacketCaptureFilter]'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureResultProperties, self).__init__(**kwargs)
self.provisioning_state = None
class PacketCaptureStorageLocation(msrest.serialization.Model):
"""Describes the storage location for a packet capture session.
:param storage_id: The ID of the storage account to save the packet capture session. Required
if no local file path is provided.
:type storage_id: str
:param storage_path: The URI of the storage path to save the packet capture. Must be a well-
formed URI describing the location to save the packet capture.
:type storage_path: str
    :param file_path: A valid local path on the target VM. Must include the name of the capture
     file (*.cap). For Linux virtual machines it must start with /var/captures. Required if no
     storage ID is provided, otherwise optional.
:type file_path: str
"""
_attribute_map = {
'storage_id': {'key': 'storageId', 'type': 'str'},
'storage_path': {'key': 'storagePath', 'type': 'str'},
'file_path': {'key': 'filePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PacketCaptureStorageLocation, self).__init__(**kwargs)
self.storage_id = kwargs.get('storage_id', None)
self.storage_path = kwargs.get('storage_path', None)
self.file_path = kwargs.get('file_path', None)
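# Illustrative usage sketch, not part of the generated client: the two storage targets described
# above. At least one of a storage account ID or a local file path on the VM must be supplied;
# both can be combined. The values are placeholders.
def _example_packet_capture_storage_location():
    return PacketCaptureStorageLocation(
        storage_id='/subscriptions/<sub>/.../storageAccounts/captures',  # placeholder storage account ID
        file_path='/var/captures/session1.cap',  # Linux targets must use a path under /var/captures
    )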
class PatchRouteFilter(SubResource):
"""Route Filter Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:vartype name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param rules: Collection of RouteFilterRules contained within a route filter.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.RouteFilterRule]
:ivar peerings: A collection of references to express route circuit peerings.
:vartype peerings: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeering]
:ivar ipv6_peerings: A collection of references to express route circuit ipv6 peerings.
:vartype ipv6_peerings: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeering]
:ivar provisioning_state: The provisioning state of the route filter resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'etag': {'readonly': True},
'type': {'readonly': True},
'peerings': {'readonly': True},
'ipv6_peerings': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'rules': {'key': 'properties.rules', 'type': '[RouteFilterRule]'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'},
'ipv6_peerings': {'key': 'properties.ipv6Peerings', 'type': '[ExpressRouteCircuitPeering]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PatchRouteFilter, self).__init__(**kwargs)
self.name = None
self.etag = None
self.type = None
self.tags = kwargs.get('tags', None)
self.rules = kwargs.get('rules', None)
self.peerings = None
self.ipv6_peerings = None
self.provisioning_state = None
class PatchRouteFilterRule(SubResource):
"""Route Filter Rule Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:vartype name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param access: The access type of the rule. Possible values include: "Allow", "Deny".
:type access: str or ~azure.mgmt.network.v2019_09_01.models.Access
:param route_filter_rule_type: The rule type of the rule. Possible values include: "Community".
:type route_filter_rule_type: str or ~azure.mgmt.network.v2019_09_01.models.RouteFilterRuleType
:param communities: The collection for bgp community values to filter on. e.g.
['12076:5010','12076:5020'].
:type communities: list[str]
:ivar provisioning_state: The provisioning state of the route filter rule resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'access': {'key': 'properties.access', 'type': 'str'},
'route_filter_rule_type': {'key': 'properties.routeFilterRuleType', 'type': 'str'},
'communities': {'key': 'properties.communities', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PatchRouteFilterRule, self).__init__(**kwargs)
self.name = None
self.etag = None
self.access = kwargs.get('access', None)
self.route_filter_rule_type = kwargs.get('route_filter_rule_type', None)
self.communities = kwargs.get('communities', None)
self.provisioning_state = None
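# Illustrative usage sketch, not part of the generated client: a community-based rule patch
# using the BGP community values quoted in the docstring above.
def _example_patch_route_filter_rule():
    return PatchRouteFilterRule(
        access='Allow',
        route_filter_rule_type='Community',
        communities=['12076:5010', '12076:5020'],
    )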
class PeerExpressRouteCircuitConnection(SubResource):
"""Peer Express Route Circuit Connection in an ExpressRouteCircuitPeering resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the circuit.
:type express_route_circuit_peering: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param peer_express_route_circuit_peering: Reference to Express Route Circuit Private Peering
Resource of the peered circuit.
:type peer_express_route_circuit_peering: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param address_prefix: /29 IP address space to carve out Customer addresses for tunnels.
:type address_prefix: str
:ivar circuit_connection_status: Express Route Circuit connection state. Possible values
include: "Connected", "Connecting", "Disconnected".
:vartype circuit_connection_status: str or
~azure.mgmt.network.v2019_09_01.models.CircuitConnectionStatus
:param connection_name: The name of the express route circuit connection resource.
:type connection_name: str
:param auth_resource_guid: The resource guid of the authorization used for the express route
circuit connection.
:type auth_resource_guid: str
:ivar provisioning_state: The provisioning state of the peer express route circuit connection
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'circuit_connection_status': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'express_route_circuit_peering': {'key': 'properties.expressRouteCircuitPeering', 'type': 'SubResource'},
'peer_express_route_circuit_peering': {'key': 'properties.peerExpressRouteCircuitPeering', 'type': 'SubResource'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'circuit_connection_status': {'key': 'properties.circuitConnectionStatus', 'type': 'str'},
'connection_name': {'key': 'properties.connectionName', 'type': 'str'},
'auth_resource_guid': {'key': 'properties.authResourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PeerExpressRouteCircuitConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.express_route_circuit_peering = kwargs.get('express_route_circuit_peering', None)
self.peer_express_route_circuit_peering = kwargs.get('peer_express_route_circuit_peering', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.circuit_connection_status = None
self.connection_name = kwargs.get('connection_name', None)
self.auth_resource_guid = kwargs.get('auth_resource_guid', None)
self.provisioning_state = None
class PeerExpressRouteCircuitConnectionListResult(msrest.serialization.Model):
"""Response for ListPeeredConnections API service call retrieves all global reach peer circuit connections that belongs to a Private Peering for an ExpressRouteCircuit.
:param value: The global reach peer circuit connection associated with Private Peering in an
ExpressRoute Circuit.
:type value: list[~azure.mgmt.network.v2019_09_01.models.PeerExpressRouteCircuitConnection]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PeerExpressRouteCircuitConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PeerExpressRouteCircuitConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class PolicySettings(msrest.serialization.Model):
"""Defines contents of a web application firewall global configuration.
:param state: Describes if the policy is in enabled state or disabled state. Possible values
include: "Disabled", "Enabled".
:type state: str or ~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallEnabledState
:param mode: Describes if it is in detection mode or prevention mode at policy level. Possible
values include: "Prevention", "Detection".
:type mode: str or ~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallMode
:param request_body_check: Whether to allow WAF to check request Body.
:type request_body_check: bool
    :param max_request_body_size_in_kb: Maximum request body size, in kilobytes (KB), for WAF.
:type max_request_body_size_in_kb: int
    :param file_upload_limit_in_mb: Maximum file upload size, in megabytes (MB), for WAF.
:type file_upload_limit_in_mb: int
"""
_validation = {
'max_request_body_size_in_kb': {'maximum': 128, 'minimum': 8},
'file_upload_limit_in_mb': {'minimum': 0},
}
_attribute_map = {
'state': {'key': 'state', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'request_body_check': {'key': 'requestBodyCheck', 'type': 'bool'},
'max_request_body_size_in_kb': {'key': 'maxRequestBodySizeInKb', 'type': 'int'},
'file_upload_limit_in_mb': {'key': 'fileUploadLimitInMb', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(PolicySettings, self).__init__(**kwargs)
self.state = kwargs.get('state', None)
self.mode = kwargs.get('mode', None)
self.request_body_check = kwargs.get('request_body_check', None)
self.max_request_body_size_in_kb = kwargs.get('max_request_body_size_in_kb', None)
self.file_upload_limit_in_mb = kwargs.get('file_upload_limit_in_mb', None)
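# Illustrative usage sketch, not part of the generated client: WAF policy settings chosen to
# respect the validation bounds above (max_request_body_size_in_kb between 8 and 128,
# file_upload_limit_in_mb at least 0). The exact values are arbitrary examples.
def _example_policy_settings():
    return PolicySettings(
        state='Enabled',
        mode='Prevention',
        request_body_check=True,
        max_request_body_size_in_kb=128,
        file_upload_limit_in_mb=100,
    )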
class PrepareNetworkPoliciesRequest(msrest.serialization.Model):
"""Details of PrepareNetworkPolicies for Subnet.
    :param service_name: The name of the service for which the subnet is being prepared.
:type service_name: str
:param network_intent_policy_configurations: A list of NetworkIntentPolicyConfiguration.
:type network_intent_policy_configurations:
list[~azure.mgmt.network.v2019_09_01.models.NetworkIntentPolicyConfiguration]
"""
_attribute_map = {
'service_name': {'key': 'serviceName', 'type': 'str'},
'network_intent_policy_configurations': {'key': 'networkIntentPolicyConfigurations', 'type': '[NetworkIntentPolicyConfiguration]'},
}
def __init__(
self,
**kwargs
):
super(PrepareNetworkPoliciesRequest, self).__init__(**kwargs)
self.service_name = kwargs.get('service_name', None)
self.network_intent_policy_configurations = kwargs.get('network_intent_policy_configurations', None)
class PrivateEndpoint(Resource):
"""Private endpoint resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param subnet: The ID of the subnet from which the private IP will be allocated.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.Subnet
:ivar network_interfaces: An array of references to the network interfaces created for this
private endpoint.
:vartype network_interfaces: list[~azure.mgmt.network.v2019_09_01.models.NetworkInterface]
:ivar provisioning_state: The provisioning state of the private endpoint resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param private_link_service_connections: A grouping of information about the connection to the
remote resource.
:type private_link_service_connections:
list[~azure.mgmt.network.v2019_09_01.models.PrivateLinkServiceConnection]
:param manual_private_link_service_connections: A grouping of information about the connection
to the remote resource. Used when the network admin does not have access to approve connections
to the remote resource.
:type manual_private_link_service_connections:
list[~azure.mgmt.network.v2019_09_01.models.PrivateLinkServiceConnection]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'network_interfaces': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_link_service_connections': {'key': 'properties.privateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'},
'manual_private_link_service_connections': {'key': 'properties.manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'},
}
def __init__(
self,
**kwargs
):
super(PrivateEndpoint, self).__init__(**kwargs)
self.etag = None
self.subnet = kwargs.get('subnet', None)
self.network_interfaces = None
self.provisioning_state = None
self.private_link_service_connections = kwargs.get('private_link_service_connections', None)
self.manual_private_link_service_connections = kwargs.get('manual_private_link_service_connections', None)
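# Illustrative usage sketch, not part of the generated client: a private endpoint that requests a
# connection to a private link service. Use 'manual_private_link_service_connections' instead when
# the remote admin must approve the request. All IDs are placeholders; Subnet and
# PrivateLinkServiceConnection are other models in this module.
def _example_private_endpoint():
    connection = PrivateLinkServiceConnection(
        name='pls-connection',
        private_link_service_id='/subscriptions/<sub>/.../privateLinkServices/pls1',  # placeholder ID
        group_ids=['sqlServer'],  # hypothetical group ID exposed by the remote resource
        request_message='Please approve this connection.',  # restricted to 140 chars
    )
    return PrivateEndpoint(
        location='westus2',
        subnet=Subnet(id='/subscriptions/<sub>/.../subnets/default'),  # placeholder subnet ID
        private_link_service_connections=[connection],
    )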
class PrivateEndpointConnection(SubResource):
"""PrivateEndpointConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar type: The resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar private_endpoint: The resource of private end point.
:vartype private_endpoint: ~azure.mgmt.network.v2019_09_01.models.PrivateEndpoint
:param private_link_service_connection_state: A collection of information about the state of
the connection between service consumer and provider.
:type private_link_service_connection_state:
~azure.mgmt.network.v2019_09_01.models.PrivateLinkServiceConnectionState
:ivar provisioning_state: The provisioning state of the private endpoint connection resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:ivar link_identifier: The consumer link id.
:vartype link_identifier: str
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'private_endpoint': {'readonly': True},
'provisioning_state': {'readonly': True},
'link_identifier': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'link_identifier': {'key': 'properties.linkIdentifier', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateEndpointConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.private_endpoint = None
self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
self.provisioning_state = None
self.link_identifier = None
class PrivateEndpointConnectionListResult(msrest.serialization.Model):
"""Response for the ListPrivateEndpointConnection API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of PrivateEndpointConnection resources for a specific private link
service.
:type value: list[~azure.mgmt.network.v2019_09_01.models.PrivateEndpointConnection]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateEndpointConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class PrivateEndpointListResult(msrest.serialization.Model):
"""Response for the ListPrivateEndpoints API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of private endpoint resources in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.PrivateEndpoint]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PrivateEndpoint]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateEndpointListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class PrivateLinkService(Resource):
"""Private link service resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param load_balancer_frontend_ip_configurations: An array of references to the load balancer IP
configurations.
:type load_balancer_frontend_ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.FrontendIPConfiguration]
:param ip_configurations: An array of private link service IP configurations.
:type ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.PrivateLinkServiceIpConfiguration]
:ivar network_interfaces: An array of references to the network interfaces created for this
private link service.
:vartype network_interfaces: list[~azure.mgmt.network.v2019_09_01.models.NetworkInterface]
:ivar provisioning_state: The provisioning state of the private link service resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
    :ivar private_endpoint_connections: An array of references to the private endpoint connections.
:vartype private_endpoint_connections:
list[~azure.mgmt.network.v2019_09_01.models.PrivateEndpointConnection]
:param visibility: The visibility list of the private link service.
:type visibility: ~azure.mgmt.network.v2019_09_01.models.ResourceSet
:param auto_approval: The auto-approval list of the private link service.
:type auto_approval: ~azure.mgmt.network.v2019_09_01.models.ResourceSet
:param fqdns: The list of Fqdn.
:type fqdns: list[str]
:ivar alias: The alias of the private link service.
:vartype alias: str
:param enable_proxy_protocol: Whether the private link service is enabled for proxy protocol or
not.
:type enable_proxy_protocol: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'network_interfaces': {'readonly': True},
'provisioning_state': {'readonly': True},
'private_endpoint_connections': {'readonly': True},
'alias': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'load_balancer_frontend_ip_configurations': {'key': 'properties.loadBalancerFrontendIpConfigurations', 'type': '[FrontendIPConfiguration]'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[PrivateLinkServiceIpConfiguration]'},
'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
'visibility': {'key': 'properties.visibility', 'type': 'ResourceSet'},
'auto_approval': {'key': 'properties.autoApproval', 'type': 'ResourceSet'},
'fqdns': {'key': 'properties.fqdns', 'type': '[str]'},
'alias': {'key': 'properties.alias', 'type': 'str'},
'enable_proxy_protocol': {'key': 'properties.enableProxyProtocol', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkService, self).__init__(**kwargs)
self.etag = None
self.load_balancer_frontend_ip_configurations = kwargs.get('load_balancer_frontend_ip_configurations', None)
self.ip_configurations = kwargs.get('ip_configurations', None)
self.network_interfaces = None
self.provisioning_state = None
self.private_endpoint_connections = None
self.visibility = kwargs.get('visibility', None)
self.auto_approval = kwargs.get('auto_approval', None)
self.fqdns = kwargs.get('fqdns', None)
self.alias = None
self.enable_proxy_protocol = kwargs.get('enable_proxy_protocol', None)
class PrivateLinkServiceConnection(SubResource):
"""PrivateLinkServiceConnection resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar type: The resource type.
:vartype type: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the private link service connection
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param private_link_service_id: The resource id of private link service.
:type private_link_service_id: str
:param group_ids: The ID(s) of the group(s) obtained from the remote resource that this private
endpoint should connect to.
:type group_ids: list[str]
:param request_message: A message passed to the owner of the remote resource with this
connection request. Restricted to 140 chars.
:type request_message: str
:param private_link_service_connection_state: A collection of read-only information about the
state of the connection to the remote resource.
:type private_link_service_connection_state:
~azure.mgmt.network.v2019_09_01.models.PrivateLinkServiceConnectionState
"""
_validation = {
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_link_service_id': {'key': 'properties.privateLinkServiceId', 'type': 'str'},
'group_ids': {'key': 'properties.groupIds', 'type': '[str]'},
'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.type = None
self.etag = None
self.provisioning_state = None
self.private_link_service_id = kwargs.get('private_link_service_id', None)
self.group_ids = kwargs.get('group_ids', None)
self.request_message = kwargs.get('request_message', None)
self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
class PrivateLinkServiceConnectionState(msrest.serialization.Model):
"""A collection of information about the state of the connection between service consumer and provider.
:param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
of the service.
:type status: str
:param description: The reason for approval/rejection of the connection.
:type description: str
:param actions_required: A message indicating if changes on the service provider require any
updates on the consumer.
:type actions_required: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'actions_required': {'key': 'actionsRequired', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.description = kwargs.get('description', None)
self.actions_required = kwargs.get('actions_required', None)
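# Illustrative usage sketch, not part of the generated client: the connection state a service
# owner might set when approving a pending private endpoint connection, following the
# Approved/Rejected/Removed convention described above.
def _example_private_link_service_connection_state():
    return PrivateLinkServiceConnectionState(
        status='Approved',
        description='Approved by the service owner',
        actions_required='None',
    )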
class PrivateLinkServiceIpConfiguration(SubResource):
"""The private link service ip configuration.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of private link service ip configuration.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: The resource type.
:vartype type: str
:param private_ip_address: The private IP address of the IP configuration.
:type private_ip_address: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
:param subnet: The reference to the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.Subnet
:param primary: Whether the ip configuration is primary or not.
:type primary: bool
:ivar provisioning_state: The provisioning state of the private link service IP configuration
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param private_ip_address_version: Whether the specific IP configuration is IPv4 or IPv6.
Default is IPv4. Possible values include: "IPv4", "IPv6".
:type private_ip_address_version: str or ~azure.mgmt.network.v2019_09_01.models.IPVersion
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
'primary': {'key': 'properties.primary', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_ip_address_version': {'key': 'properties.privateIPAddressVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceIpConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.private_ip_address = kwargs.get('private_ip_address', None)
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.primary = kwargs.get('primary', None)
self.provisioning_state = None
self.private_ip_address_version = kwargs.get('private_ip_address_version', None)
class PrivateLinkServiceListResult(msrest.serialization.Model):
"""Response for the ListPrivateLinkService API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of PrivateLinkService resources in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.PrivateLinkService]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PrivateLinkService]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ResourceSet(msrest.serialization.Model):
"""The base resource set for visibility and auto-approval.
:param subscriptions: The list of subscriptions.
:type subscriptions: list[str]
"""
_attribute_map = {
'subscriptions': {'key': 'subscriptions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ResourceSet, self).__init__(**kwargs)
self.subscriptions = kwargs.get('subscriptions', None)
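# Illustrative usage sketch, not part of the generated client: ResourceSet is the plain
# subscription-list shape that PrivateLinkService (defined earlier in this module) uses for both
# its visibility and auto-approval lists. The subscription IDs are placeholders.
def _example_private_link_service_visibility():
    allowed = ResourceSet(subscriptions=['<subscription-id-1>', '<subscription-id-2>'])
    return PrivateLinkService(
        location='westus2',
        visibility=allowed,
        auto_approval=ResourceSet(subscriptions=['<subscription-id-1>']),
    )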
class PrivateLinkServicePropertiesAutoApproval(ResourceSet):
"""The auto-approval list of the private link service.
:param subscriptions: The list of subscriptions.
:type subscriptions: list[str]
"""
_attribute_map = {
'subscriptions': {'key': 'subscriptions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServicePropertiesAutoApproval, self).__init__(**kwargs)
class PrivateLinkServicePropertiesVisibility(ResourceSet):
"""The visibility list of the private link service.
:param subscriptions: The list of subscriptions.
:type subscriptions: list[str]
"""
_attribute_map = {
'subscriptions': {'key': 'subscriptions', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServicePropertiesVisibility, self).__init__(**kwargs)
class PrivateLinkServiceVisibility(msrest.serialization.Model):
"""Response for the CheckPrivateLinkServiceVisibility API service call.
:param visible: Private Link Service Visibility (True/False).
:type visible: bool
"""
_attribute_map = {
'visible': {'key': 'visible', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(PrivateLinkServiceVisibility, self).__init__(**kwargs)
self.visible = kwargs.get('visible', None)
class Probe(SubResource):
"""A load balancer probe.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of probes used by the load
balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:ivar load_balancing_rules: The load balancer rules that use this probe.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param protocol: The protocol of the end point. If 'Tcp' is specified, a received ACK is
required for the probe to be successful. If 'Http' or 'Https' is specified, a 200 OK response
     from the specified URI is required for the probe to be successful. Possible values include:
"Http", "Tcp", "Https".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.ProbeProtocol
    :param port: The port on which the endpoint is probed. Possible values range from 1 to 65535,
inclusive.
:type port: int
:param interval_in_seconds: The interval, in seconds, for how frequently to probe the endpoint
for health status. Typically, the interval is slightly less than half the allocated timeout
period (in seconds) which allows two full probes before taking the instance out of rotation.
The default value is 15, the minimum value is 5.
:type interval_in_seconds: int
    :param number_of_probes: The number of probes with no response that will result in stopping
     further traffic from being delivered to the endpoint. This value allows endpoints to be taken
     out of rotation faster or slower than the typical times used in Azure.
:type number_of_probes: int
    :param request_path: The URI used for requesting health status from the VM. Path is required if
     the protocol is set to Http. Otherwise, it is not allowed. There is no default value.
:type request_path: str
:ivar provisioning_state: The provisioning state of the probe resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'load_balancing_rules': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'interval_in_seconds': {'key': 'properties.intervalInSeconds', 'type': 'int'},
'number_of_probes': {'key': 'properties.numberOfProbes', 'type': 'int'},
'request_path': {'key': 'properties.requestPath', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Probe, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.load_balancing_rules = None
self.protocol = kwargs.get('protocol', None)
self.port = kwargs.get('port', None)
self.interval_in_seconds = kwargs.get('interval_in_seconds', None)
self.number_of_probes = kwargs.get('number_of_probes', None)
self.request_path = kwargs.get('request_path', None)
self.provisioning_state = None
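# Editorial usage sketch (not part of the generated models): constructing a TCP health
# probe via keyword arguments, as the ``__init__`` above expects. All literal values are
# illustrative assumptions, apart from the documented default interval of 15 seconds.
def _example_probe():
    return Probe(
        name='tcp-health-probe',      # hypothetical probe name
        protocol='Tcp',               # a received ACK marks the probe successful
        port=8080,                    # port the backend instances listen on
        interval_in_seconds=15,       # documented default interval
        number_of_probes=2,           # unanswered probes before removal from rotation
    )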
class ProtocolConfiguration(msrest.serialization.Model):
"""Configuration of the protocol.
:param http_configuration: HTTP configuration of the connectivity check.
:type http_configuration: ~azure.mgmt.network.v2019_09_01.models.HTTPConfiguration
"""
_attribute_map = {
'http_configuration': {'key': 'HTTPConfiguration', 'type': 'HTTPConfiguration'},
}
def __init__(
self,
**kwargs
):
super(ProtocolConfiguration, self).__init__(**kwargs)
self.http_configuration = kwargs.get('http_configuration', None)
class ProtocolCustomSettingsFormat(msrest.serialization.Model):
"""DDoS custom policy properties.
:param protocol: The protocol for which the DDoS protection policy is being customized.
Possible values include: "Tcp", "Udp", "Syn".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.DdosCustomPolicyProtocol
:param trigger_rate_override: The customized DDoS protection trigger rate.
:type trigger_rate_override: str
:param source_rate_override: The customized DDoS protection source rate.
:type source_rate_override: str
:param trigger_sensitivity_override: The customized DDoS protection trigger rate sensitivity
degrees. High: Trigger rate set with most sensitivity w.r.t. normal traffic. Default: Trigger
rate set with moderate sensitivity w.r.t. normal traffic. Low: Trigger rate set with less
sensitivity w.r.t. normal traffic. Relaxed: Trigger rate set with least sensitivity w.r.t.
normal traffic. Possible values include: "Relaxed", "Low", "Default", "High".
:type trigger_sensitivity_override: str or
~azure.mgmt.network.v2019_09_01.models.DdosCustomPolicyTriggerSensitivityOverride
"""
_attribute_map = {
'protocol': {'key': 'protocol', 'type': 'str'},
'trigger_rate_override': {'key': 'triggerRateOverride', 'type': 'str'},
'source_rate_override': {'key': 'sourceRateOverride', 'type': 'str'},
'trigger_sensitivity_override': {'key': 'triggerSensitivityOverride', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ProtocolCustomSettingsFormat, self).__init__(**kwargs)
self.protocol = kwargs.get('protocol', None)
self.trigger_rate_override = kwargs.get('trigger_rate_override', None)
self.source_rate_override = kwargs.get('source_rate_override', None)
self.trigger_sensitivity_override = kwargs.get('trigger_sensitivity_override', None)
class PublicIPAddress(Resource):
"""Public IP address resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The public IP address SKU.
:type sku: ~azure.mgmt.network.v2019_09_01.models.PublicIPAddressSku
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
    :param zones: A list of availability zones denoting the zone from which the IP allocated for
     the resource needs to come.
:type zones: list[str]
:param public_ip_allocation_method: The public IP address allocation method. Possible values
include: "Static", "Dynamic".
:type public_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
:param public_ip_address_version: The public IP address version. Possible values include:
"IPv4", "IPv6".
:type public_ip_address_version: str or ~azure.mgmt.network.v2019_09_01.models.IPVersion
:ivar ip_configuration: The IP configuration associated with the public IP address.
:vartype ip_configuration: ~azure.mgmt.network.v2019_09_01.models.IPConfiguration
:param dns_settings: The FQDN of the DNS record associated with the public IP address.
:type dns_settings: ~azure.mgmt.network.v2019_09_01.models.PublicIPAddressDnsSettings
:param ddos_settings: The DDoS protection custom policy associated with the public IP address.
:type ddos_settings: ~azure.mgmt.network.v2019_09_01.models.DdosSettings
:param ip_tags: The list of tags associated with the public IP address.
:type ip_tags: list[~azure.mgmt.network.v2019_09_01.models.IpTag]
:param ip_address: The IP address associated with the public IP address resource.
:type ip_address: str
:param public_ip_prefix: The Public IP Prefix this Public IP Address should be allocated from.
:type public_ip_prefix: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param idle_timeout_in_minutes: The idle timeout of the public IP address.
:type idle_timeout_in_minutes: int
:ivar resource_guid: The resource GUID property of the public IP address resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the public IP address resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'ip_configuration': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'PublicIPAddressSku'},
'etag': {'key': 'etag', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'public_ip_allocation_method': {'key': 'properties.publicIPAllocationMethod', 'type': 'str'},
'public_ip_address_version': {'key': 'properties.publicIPAddressVersion', 'type': 'str'},
'ip_configuration': {'key': 'properties.ipConfiguration', 'type': 'IPConfiguration'},
'dns_settings': {'key': 'properties.dnsSettings', 'type': 'PublicIPAddressDnsSettings'},
'ddos_settings': {'key': 'properties.ddosSettings', 'type': 'DdosSettings'},
'ip_tags': {'key': 'properties.ipTags', 'type': '[IpTag]'},
'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
'public_ip_prefix': {'key': 'properties.publicIPPrefix', 'type': 'SubResource'},
'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddress, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = None
self.zones = kwargs.get('zones', None)
self.public_ip_allocation_method = kwargs.get('public_ip_allocation_method', None)
self.public_ip_address_version = kwargs.get('public_ip_address_version', None)
self.ip_configuration = None
self.dns_settings = kwargs.get('dns_settings', None)
self.ddos_settings = kwargs.get('ddos_settings', None)
self.ip_tags = kwargs.get('ip_tags', None)
self.ip_address = kwargs.get('ip_address', None)
self.public_ip_prefix = kwargs.get('public_ip_prefix', None)
self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
self.resource_guid = None
self.provisioning_state = None
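# Editorial usage sketch (not part of the generated models): a static Standard-SKU public
# IP with DNS settings. ``PublicIPAddressSku`` and ``PublicIPAddressDnsSettings`` are
# defined later in this module, which is fine because the names are only resolved when the
# function is called. All literal values are illustrative.
def _example_public_ip_address():
    return PublicIPAddress(
        location='westus2',                       # hypothetical region
        sku=PublicIPAddressSku(name='Standard'),
        public_ip_allocation_method='Static',
        public_ip_address_version='IPv4',
        idle_timeout_in_minutes=4,
        dns_settings=PublicIPAddressDnsSettings(
            domain_name_label='example-app',      # hypothetical DNS label
        ),
    )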
class PublicIPAddressDnsSettings(msrest.serialization.Model):
"""Contains FQDN of the DNS record associated with the public IP address.
:param domain_name_label: The domain name label. The concatenation of the domain name label and
     the regionalized DNS zone makes up the fully qualified domain name associated with the public IP
address. If a domain name label is specified, an A DNS record is created for the public IP in
the Microsoft Azure DNS system.
:type domain_name_label: str
:param fqdn: The Fully Qualified Domain Name of the A DNS record associated with the public IP.
This is the concatenation of the domainNameLabel and the regionalized DNS zone.
:type fqdn: str
:param reverse_fqdn: The reverse FQDN. A user-visible, fully qualified domain name that
resolves to this public IP address. If the reverseFqdn is specified, then a PTR DNS record is
created pointing from the IP address in the in-addr.arpa domain to the reverse FQDN.
:type reverse_fqdn: str
"""
_attribute_map = {
'domain_name_label': {'key': 'domainNameLabel', 'type': 'str'},
'fqdn': {'key': 'fqdn', 'type': 'str'},
'reverse_fqdn': {'key': 'reverseFqdn', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddressDnsSettings, self).__init__(**kwargs)
self.domain_name_label = kwargs.get('domain_name_label', None)
self.fqdn = kwargs.get('fqdn', None)
self.reverse_fqdn = kwargs.get('reverse_fqdn', None)
class PublicIPAddressListResult(msrest.serialization.Model):
"""Response for ListPublicIpAddresses API service call.
:param value: A list of public IP addresses that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.PublicIPAddress]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PublicIPAddress]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddressListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class PublicIPAddressSku(msrest.serialization.Model):
"""SKU of a public IP address.
:param name: Name of a public IP address SKU. Possible values include: "Basic", "Standard".
:type name: str or ~azure.mgmt.network.v2019_09_01.models.PublicIPAddressSkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPAddressSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class PublicIPPrefix(Resource):
"""Public IP prefix resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: The public IP prefix SKU.
:type sku: ~azure.mgmt.network.v2019_09_01.models.PublicIPPrefixSku
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
    :param zones: A list of availability zones denoting the zone from which the IP allocated for
     the resource needs to come.
:type zones: list[str]
:param public_ip_address_version: The public IP address version. Possible values include:
"IPv4", "IPv6".
:type public_ip_address_version: str or ~azure.mgmt.network.v2019_09_01.models.IPVersion
:param ip_tags: The list of tags associated with the public IP prefix.
:type ip_tags: list[~azure.mgmt.network.v2019_09_01.models.IpTag]
:param prefix_length: The Length of the Public IP Prefix.
:type prefix_length: int
:ivar ip_prefix: The allocated Prefix.
:vartype ip_prefix: str
:ivar public_ip_addresses: The list of all referenced PublicIPAddresses.
:vartype public_ip_addresses:
list[~azure.mgmt.network.v2019_09_01.models.ReferencedPublicIpAddress]
:ivar load_balancer_frontend_ip_configuration: The reference to load balancer frontend IP
configuration associated with the public IP prefix.
:vartype load_balancer_frontend_ip_configuration:
~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar resource_guid: The resource GUID property of the public IP prefix resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the public IP prefix resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'ip_prefix': {'readonly': True},
'public_ip_addresses': {'readonly': True},
'load_balancer_frontend_ip_configuration': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'PublicIPPrefixSku'},
'etag': {'key': 'etag', 'type': 'str'},
'zones': {'key': 'zones', 'type': '[str]'},
'public_ip_address_version': {'key': 'properties.publicIPAddressVersion', 'type': 'str'},
'ip_tags': {'key': 'properties.ipTags', 'type': '[IpTag]'},
'prefix_length': {'key': 'properties.prefixLength', 'type': 'int'},
'ip_prefix': {'key': 'properties.ipPrefix', 'type': 'str'},
'public_ip_addresses': {'key': 'properties.publicIPAddresses', 'type': '[ReferencedPublicIpAddress]'},
'load_balancer_frontend_ip_configuration': {'key': 'properties.loadBalancerFrontendIpConfiguration', 'type': 'SubResource'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPPrefix, self).__init__(**kwargs)
self.sku = kwargs.get('sku', None)
self.etag = None
self.zones = kwargs.get('zones', None)
self.public_ip_address_version = kwargs.get('public_ip_address_version', None)
self.ip_tags = kwargs.get('ip_tags', None)
self.prefix_length = kwargs.get('prefix_length', None)
self.ip_prefix = None
self.public_ip_addresses = None
self.load_balancer_frontend_ip_configuration = None
self.resource_guid = None
self.provisioning_state = None
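# Editorial usage sketch (not part of the generated models): a /28 public IP prefix.
# ``PublicIPPrefixSku`` is defined later in this module; the values are illustrative only.
def _example_public_ip_prefix():
    return PublicIPPrefix(
        location='westus2',                       # hypothetical region
        sku=PublicIPPrefixSku(name='Standard'),
        prefix_length=28,                         # a /28 yields 16 addresses
        public_ip_address_version='IPv4',
    )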
class PublicIPPrefixListResult(msrest.serialization.Model):
"""Response for ListPublicIpPrefixes API service call.
:param value: A list of public IP prefixes that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.PublicIPPrefix]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[PublicIPPrefix]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPPrefixListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class PublicIPPrefixSku(msrest.serialization.Model):
"""SKU of a public IP prefix.
:param name: Name of a public IP prefix SKU. Possible values include: "Standard".
:type name: str or ~azure.mgmt.network.v2019_09_01.models.PublicIPPrefixSkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(PublicIPPrefixSku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class QueryTroubleshootingParameters(msrest.serialization.Model):
"""Parameters that define the resource to query the troubleshooting result.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The target resource ID to query the troubleshooting
result.
:type target_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(QueryTroubleshootingParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
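# Editorial note (not generated code): unlike the optional parameters elsewhere in this
# module, ``target_resource_id`` is read with ``kwargs['target_resource_id']`` above, so
# omitting it raises ``KeyError`` at construction time. The resource ID below is an
# illustrative placeholder, not a real resource.
def _example_query_troubleshooting_parameters():
    return QueryTroubleshootingParameters(
        target_resource_id=(
            '/subscriptions/<subscription-id>/resourceGroups/<rg>'
            '/providers/Microsoft.Network/connections/<connection-name>'
        ),
    )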
class ReferencedPublicIpAddress(msrest.serialization.Model):
"""Reference to a public IP address.
:param id: The PublicIPAddress Reference.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ReferencedPublicIpAddress, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class ResourceNavigationLink(SubResource):
"""ResourceNavigationLink resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Resource type.
:vartype type: str
:param linked_resource_type: Resource type of the linked resource.
:type linked_resource_type: str
:param link: Link to the external resource.
:type link: str
:ivar provisioning_state: The provisioning state of the resource navigation link resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'linked_resource_type': {'key': 'properties.linkedResourceType', 'type': 'str'},
'link': {'key': 'properties.link', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceNavigationLink, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.linked_resource_type = kwargs.get('linked_resource_type', None)
self.link = kwargs.get('link', None)
self.provisioning_state = None
class ResourceNavigationLinksListResult(msrest.serialization.Model):
"""Response for ResourceNavigationLinks_List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: The resource navigation links in a subnet.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ResourceNavigationLink]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceNavigationLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceNavigationLinksListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class RetentionPolicyParameters(msrest.serialization.Model):
"""Parameters that define the retention policy for flow log.
:param days: Number of days to retain flow log records.
:type days: int
:param enabled: Flag to enable/disable retention.
:type enabled: bool
"""
_attribute_map = {
'days': {'key': 'days', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(RetentionPolicyParameters, self).__init__(**kwargs)
self.days = kwargs.get('days', 0)
self.enabled = kwargs.get('enabled', False)
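# Editorial note (not generated code): this model defaults ``days`` to 0 and ``enabled`` to
# False when the kwargs are omitted, so an explicit retention policy looks like the sketch
# below (the 30-day value is illustrative).
def _example_retention_policy():
    return RetentionPolicyParameters(
        days=30,        # keep flow-log records for 30 days
        enabled=True,   # retention is off unless explicitly enabled
    )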
class Route(SubResource):
"""Route resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param address_prefix: The destination CIDR to which the route applies.
:type address_prefix: str
:param next_hop_type: The type of Azure hop the packet should be sent to. Possible values
include: "VirtualNetworkGateway", "VnetLocal", "Internet", "VirtualAppliance", "None".
:type next_hop_type: str or ~azure.mgmt.network.v2019_09_01.models.RouteNextHopType
:param next_hop_ip_address: The IP address packets should be forwarded to. Next hop values are
only allowed in routes where the next hop type is VirtualAppliance.
:type next_hop_ip_address: str
:ivar provisioning_state: The provisioning state of the route resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'next_hop_type': {'key': 'properties.nextHopType', 'type': 'str'},
'next_hop_ip_address': {'key': 'properties.nextHopIpAddress', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Route, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.address_prefix = kwargs.get('address_prefix', None)
self.next_hop_type = kwargs.get('next_hop_type', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
self.provisioning_state = None
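# Editorial usage sketch (not part of the generated models): a user-defined route that
# forwards a CIDR block to a network virtual appliance. Per the docstring above,
# ``next_hop_ip_address`` is only honoured when ``next_hop_type`` is 'VirtualAppliance'.
# All literal values are illustrative.
def _example_route():
    return Route(
        name='to-firewall',                 # hypothetical route name
        address_prefix='10.1.0.0/16',
        next_hop_type='VirtualAppliance',
        next_hop_ip_address='10.0.0.4',     # private IP of the appliance
    )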
class RouteFilter(Resource):
"""Route Filter Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param rules: Collection of RouteFilterRules contained within a route filter.
:type rules: list[~azure.mgmt.network.v2019_09_01.models.RouteFilterRule]
:ivar peerings: A collection of references to express route circuit peerings.
:vartype peerings: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeering]
:ivar ipv6_peerings: A collection of references to express route circuit ipv6 peerings.
:vartype ipv6_peerings: list[~azure.mgmt.network.v2019_09_01.models.ExpressRouteCircuitPeering]
:ivar provisioning_state: The provisioning state of the route filter resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'peerings': {'readonly': True},
'ipv6_peerings': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'rules': {'key': 'properties.rules', 'type': '[RouteFilterRule]'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'},
'ipv6_peerings': {'key': 'properties.ipv6Peerings', 'type': '[ExpressRouteCircuitPeering]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilter, self).__init__(**kwargs)
self.etag = None
self.rules = kwargs.get('rules', None)
self.peerings = None
self.ipv6_peerings = None
self.provisioning_state = None
class RouteFilterListResult(msrest.serialization.Model):
"""Response for the ListRouteFilters API service call.
:param value: A list of route filters in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.RouteFilter]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[RouteFilter]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilterListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class RouteFilterRule(SubResource):
"""Route Filter Rule Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param location: Resource location.
:type location: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param access: The access type of the rule. Possible values include: "Allow", "Deny".
:type access: str or ~azure.mgmt.network.v2019_09_01.models.Access
:param route_filter_rule_type: The rule type of the rule. Possible values include: "Community".
:type route_filter_rule_type: str or ~azure.mgmt.network.v2019_09_01.models.RouteFilterRuleType
    :param communities: The collection of BGP community values to filter on, e.g.
     ['12076:5010', '12076:5020'].
:type communities: list[str]
:ivar provisioning_state: The provisioning state of the route filter rule resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'access': {'key': 'properties.access', 'type': 'str'},
'route_filter_rule_type': {'key': 'properties.routeFilterRuleType', 'type': 'str'},
'communities': {'key': 'properties.communities', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilterRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.location = kwargs.get('location', None)
self.etag = None
self.access = kwargs.get('access', None)
self.route_filter_rule_type = kwargs.get('route_filter_rule_type', None)
self.communities = kwargs.get('communities', None)
self.provisioning_state = None
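# Editorial usage sketch (not part of the generated models): an allow rule that filters on
# BGP community values, as described in the docstring above. Values are illustrative.
def _example_route_filter_rule():
    return RouteFilterRule(
        name='allow-exchange',              # hypothetical rule name
        access='Allow',
        route_filter_rule_type='Community',
        communities=['12076:5010', '12076:5020'],
    )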
class RouteFilterRuleListResult(msrest.serialization.Model):
"""Response for the ListRouteFilterRules API service call.
:param value: A list of RouteFilterRules in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.RouteFilterRule]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[RouteFilterRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteFilterRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class RouteListResult(msrest.serialization.Model):
"""Response for the ListRoute API service call.
:param value: A list of routes in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.Route]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Route]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class RouteTable(Resource):
"""Route table resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param routes: Collection of routes contained within a route table.
:type routes: list[~azure.mgmt.network.v2019_09_01.models.Route]
:ivar subnets: A collection of references to subnets.
:vartype subnets: list[~azure.mgmt.network.v2019_09_01.models.Subnet]
:param disable_bgp_route_propagation: Whether to disable the routes learned by BGP on that
route table. True means disable.
:type disable_bgp_route_propagation: bool
:ivar provisioning_state: The provisioning state of the route table resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'subnets': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'routes': {'key': 'properties.routes', 'type': '[Route]'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'disable_bgp_route_propagation': {'key': 'properties.disableBgpRoutePropagation', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteTable, self).__init__(**kwargs)
self.etag = None
self.routes = kwargs.get('routes', None)
self.subnets = None
self.disable_bgp_route_propagation = kwargs.get('disable_bgp_route_propagation', None)
self.provisioning_state = None
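# Editorial usage sketch (not part of the generated models): a route table bundling a
# single user-defined route while keeping BGP-learned routes enabled. Values are
# illustrative.
def _example_route_table():
    return RouteTable(
        location='westus2',                        # hypothetical region
        disable_bgp_route_propagation=False,       # keep routes learned by BGP
        routes=[
            Route(
                name='to-firewall',                # hypothetical route name
                address_prefix='10.1.0.0/16',
                next_hop_type='VirtualAppliance',
                next_hop_ip_address='10.0.0.4',
            ),
        ],
    )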
class RouteTableListResult(msrest.serialization.Model):
"""Response for the ListRouteTable API service call.
:param value: A list of route tables in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.RouteTable]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[RouteTable]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RouteTableListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class SecurityGroupNetworkInterface(msrest.serialization.Model):
"""Network interface and all its associated security rules.
:param id: ID of the network interface.
:type id: str
:param security_rule_associations: All security rules associated with the network interface.
:type security_rule_associations:
~azure.mgmt.network.v2019_09_01.models.SecurityRuleAssociations
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rule_associations': {'key': 'securityRuleAssociations', 'type': 'SecurityRuleAssociations'},
}
def __init__(
self,
**kwargs
):
super(SecurityGroupNetworkInterface, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.security_rule_associations = kwargs.get('security_rule_associations', None)
class SecurityGroupViewParameters(msrest.serialization.Model):
"""Parameters that define the VM to check security groups for.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. ID of the target VM.
:type target_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SecurityGroupViewParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
class SecurityGroupViewResult(msrest.serialization.Model):
"""The information about security rules applied to the specified VM.
:param network_interfaces: List of network interfaces on the specified VM.
:type network_interfaces:
list[~azure.mgmt.network.v2019_09_01.models.SecurityGroupNetworkInterface]
"""
_attribute_map = {
'network_interfaces': {'key': 'networkInterfaces', 'type': '[SecurityGroupNetworkInterface]'},
}
def __init__(
self,
**kwargs
):
super(SecurityGroupViewResult, self).__init__(**kwargs)
self.network_interfaces = kwargs.get('network_interfaces', None)
class SecurityRule(SubResource):
"""Network security rule.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param description: A description for this rule. Restricted to 140 chars.
:type description: str
:param protocol: Network protocol this rule applies to. Possible values include: "Tcp", "Udp",
"Icmp", "Esp", "*", "Ah".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.SecurityRuleProtocol
:param source_port_range: The source port or range. Integer or range between 0 and 65535.
Asterisk '*' can also be used to match all ports.
:type source_port_range: str
:param destination_port_range: The destination port or range. Integer or range between 0 and
65535. Asterisk '*' can also be used to match all ports.
:type destination_port_range: str
:param source_address_prefix: The CIDR or source IP range. Asterisk '*' can also be used to
match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet'
can also be used. If this is an ingress rule, specifies where network traffic originates from.
:type source_address_prefix: str
:param source_address_prefixes: The CIDR or source IP ranges.
:type source_address_prefixes: list[str]
:param source_application_security_groups: The application security group specified as source.
:type source_application_security_groups:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationSecurityGroup]
:param destination_address_prefix: The destination address prefix. CIDR or destination IP
     range. Asterisk '*' can also be used to match all destination IPs. Default tags such as
'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
:type destination_address_prefix: str
:param destination_address_prefixes: The destination address prefixes. CIDR or destination IP
ranges.
:type destination_address_prefixes: list[str]
:param destination_application_security_groups: The application security group specified as
destination.
:type destination_application_security_groups:
list[~azure.mgmt.network.v2019_09_01.models.ApplicationSecurityGroup]
:param source_port_ranges: The source port ranges.
:type source_port_ranges: list[str]
:param destination_port_ranges: The destination port ranges.
:type destination_port_ranges: list[str]
:param access: The network traffic is allowed or denied. Possible values include: "Allow",
"Deny".
:type access: str or ~azure.mgmt.network.v2019_09_01.models.SecurityRuleAccess
:param priority: The priority of the rule. The value can be between 100 and 4096. The priority
number must be unique for each rule in the collection. The lower the priority number, the
higher the priority of the rule.
:type priority: int
    :param direction: The direction of the rule. The direction specifies whether the rule is evaluated
on incoming or outgoing traffic. Possible values include: "Inbound", "Outbound".
:type direction: str or ~azure.mgmt.network.v2019_09_01.models.SecurityRuleDirection
:ivar provisioning_state: The provisioning state of the security rule resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'protocol': {'key': 'properties.protocol', 'type': 'str'},
'source_port_range': {'key': 'properties.sourcePortRange', 'type': 'str'},
'destination_port_range': {'key': 'properties.destinationPortRange', 'type': 'str'},
'source_address_prefix': {'key': 'properties.sourceAddressPrefix', 'type': 'str'},
'source_address_prefixes': {'key': 'properties.sourceAddressPrefixes', 'type': '[str]'},
'source_application_security_groups': {'key': 'properties.sourceApplicationSecurityGroups', 'type': '[ApplicationSecurityGroup]'},
'destination_address_prefix': {'key': 'properties.destinationAddressPrefix', 'type': 'str'},
'destination_address_prefixes': {'key': 'properties.destinationAddressPrefixes', 'type': '[str]'},
'destination_application_security_groups': {'key': 'properties.destinationApplicationSecurityGroups', 'type': '[ApplicationSecurityGroup]'},
'source_port_ranges': {'key': 'properties.sourcePortRanges', 'type': '[str]'},
'destination_port_ranges': {'key': 'properties.destinationPortRanges', 'type': '[str]'},
'access': {'key': 'properties.access', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'direction': {'key': 'properties.direction', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SecurityRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.description = kwargs.get('description', None)
self.protocol = kwargs.get('protocol', None)
self.source_port_range = kwargs.get('source_port_range', None)
self.destination_port_range = kwargs.get('destination_port_range', None)
self.source_address_prefix = kwargs.get('source_address_prefix', None)
self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
self.source_application_security_groups = kwargs.get('source_application_security_groups', None)
self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
self.destination_application_security_groups = kwargs.get('destination_application_security_groups', None)
self.source_port_ranges = kwargs.get('source_port_ranges', None)
self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
self.access = kwargs.get('access', None)
self.priority = kwargs.get('priority', None)
self.direction = kwargs.get('direction', None)
self.provisioning_state = None
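# Editorial usage sketch (not part of the generated models): an inbound rule allowing HTTPS
# from the Internet service tag. The priority must be unique within the rule collection;
# the value here is illustrative.
def _example_security_rule():
    return SecurityRule(
        name='allow-https-inbound',        # hypothetical rule name
        description='Allow inbound HTTPS from the Internet.',
        protocol='Tcp',
        source_address_prefix='Internet',  # default tag, per the docstring
        source_port_range='*',
        destination_address_prefix='*',
        destination_port_range='443',
        access='Allow',
        priority=100,                      # lower number = higher priority
        direction='Inbound',
    )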
class SecurityRuleAssociations(msrest.serialization.Model):
"""All security rules associated with the network interface.
    :param network_interface_association: Network interface and its custom security rules.
:type network_interface_association:
~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceAssociation
    :param subnet_association: Subnet and its custom security rules.
:type subnet_association: ~azure.mgmt.network.v2019_09_01.models.SubnetAssociation
:param default_security_rules: Collection of default security rules of the network security
group.
:type default_security_rules: list[~azure.mgmt.network.v2019_09_01.models.SecurityRule]
:param effective_security_rules: Collection of effective security rules.
:type effective_security_rules:
list[~azure.mgmt.network.v2019_09_01.models.EffectiveNetworkSecurityRule]
"""
_attribute_map = {
'network_interface_association': {'key': 'networkInterfaceAssociation', 'type': 'NetworkInterfaceAssociation'},
'subnet_association': {'key': 'subnetAssociation', 'type': 'SubnetAssociation'},
'default_security_rules': {'key': 'defaultSecurityRules', 'type': '[SecurityRule]'},
'effective_security_rules': {'key': 'effectiveSecurityRules', 'type': '[EffectiveNetworkSecurityRule]'},
}
def __init__(
self,
**kwargs
):
super(SecurityRuleAssociations, self).__init__(**kwargs)
self.network_interface_association = kwargs.get('network_interface_association', None)
self.subnet_association = kwargs.get('subnet_association', None)
self.default_security_rules = kwargs.get('default_security_rules', None)
self.effective_security_rules = kwargs.get('effective_security_rules', None)
class SecurityRuleListResult(msrest.serialization.Model):
"""Response for ListSecurityRule API service call. Retrieves all security rules that belongs to a network security group.
:param value: The security rules in a network security group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.SecurityRule]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[SecurityRule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SecurityRuleListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ServiceAssociationLink(SubResource):
"""ServiceAssociationLink resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Resource type.
:vartype type: str
:param linked_resource_type: Resource type of the linked resource.
:type linked_resource_type: str
:param link: Link to the external resource.
:type link: str
:ivar provisioning_state: The provisioning state of the service association link resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param allow_delete: If true, the resource can be deleted.
:type allow_delete: bool
:param locations: A list of locations.
:type locations: list[str]
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'linked_resource_type': {'key': 'properties.linkedResourceType', 'type': 'str'},
'link': {'key': 'properties.link', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'allow_delete': {'key': 'properties.allowDelete', 'type': 'bool'},
'locations': {'key': 'properties.locations', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ServiceAssociationLink, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.linked_resource_type = kwargs.get('linked_resource_type', None)
self.link = kwargs.get('link', None)
self.provisioning_state = None
self.allow_delete = kwargs.get('allow_delete', None)
self.locations = kwargs.get('locations', None)
class ServiceAssociationLinksListResult(msrest.serialization.Model):
"""Response for ServiceAssociationLinks_List operation.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: The service association links in a subnet.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ServiceAssociationLink]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ServiceAssociationLink]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceAssociationLinksListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ServiceEndpointPolicy(Resource):
"""Service End point policy resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param service_endpoint_policy_definitions: A collection of service endpoint policy definitions
of the service endpoint policy.
:type service_endpoint_policy_definitions:
list[~azure.mgmt.network.v2019_09_01.models.ServiceEndpointPolicyDefinition]
:ivar subnets: A collection of references to subnets.
:vartype subnets: list[~azure.mgmt.network.v2019_09_01.models.Subnet]
:ivar resource_guid: The resource GUID property of the service endpoint policy resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the service endpoint policy resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'subnets': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'service_endpoint_policy_definitions': {'key': 'properties.serviceEndpointPolicyDefinitions', 'type': '[ServiceEndpointPolicyDefinition]'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicy, self).__init__(**kwargs)
self.etag = None
self.service_endpoint_policy_definitions = kwargs.get('service_endpoint_policy_definitions', None)
self.subnets = None
self.resource_guid = None
self.provisioning_state = None
class ServiceEndpointPolicyDefinition(SubResource):
"""Service Endpoint policy definitions.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param description: A description for this rule. Restricted to 140 chars.
:type description: str
:param service: Service endpoint name.
:type service: str
:param service_resources: A list of service resources.
:type service_resources: list[str]
:ivar provisioning_state: The provisioning state of the service endpoint policy definition
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'service': {'key': 'properties.service', 'type': 'str'},
'service_resources': {'key': 'properties.serviceResources', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicyDefinition, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.description = kwargs.get('description', None)
self.service = kwargs.get('service', None)
self.service_resources = kwargs.get('service_resources', None)
self.provisioning_state = None
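# Editorial usage sketch (not part of the generated models): a policy definition scoping
# the Microsoft.Storage service endpoint to a single account. The resource ID is an
# illustrative placeholder.
def _example_service_endpoint_policy_definition():
    return ServiceEndpointPolicyDefinition(
        name='storage-only',                       # hypothetical definition name
        description='Restrict the endpoint to one storage account.',
        service='Microsoft.Storage',
        service_resources=[
            '/subscriptions/<subscription-id>/resourceGroups/<rg>'
            '/providers/Microsoft.Storage/storageAccounts/<account-name>',
        ],
    )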
class ServiceEndpointPolicyDefinitionListResult(msrest.serialization.Model):
"""Response for ListServiceEndpointPolicyDefinition API service call. Retrieves all service endpoint policy definition that belongs to a service endpoint policy.
:param value: The service endpoint policy definition in a service endpoint policy.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ServiceEndpointPolicyDefinition]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ServiceEndpointPolicyDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicyDefinitionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class ServiceEndpointPolicyListResult(msrest.serialization.Model):
"""Response for ListServiceEndpointPolicies API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of ServiceEndpointPolicy resources.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ServiceEndpointPolicy]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ServiceEndpointPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPolicyListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class ServiceEndpointPropertiesFormat(msrest.serialization.Model):
"""The service endpoint properties.
Variables are only populated by the server, and will be ignored when sending a request.
:param service: The type of the endpoint service.
:type service: str
:param locations: A list of locations.
:type locations: list[str]
:ivar provisioning_state: The provisioning state of the service endpoint resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'service': {'key': 'service', 'type': 'str'},
'locations': {'key': 'locations', 'type': '[str]'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceEndpointPropertiesFormat, self).__init__(**kwargs)
self.service = kwargs.get('service', None)
self.locations = kwargs.get('locations', None)
self.provisioning_state = None
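# Editorial usage sketch (not part of the generated models): a service endpoint entry of
# the kind attached to ``Subnet.service_endpoints`` below. Values are illustrative.
def _example_service_endpoint():
    return ServiceEndpointPropertiesFormat(
        service='Microsoft.Storage',
        locations=['westus2', 'eastus'],   # hypothetical regions
    )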
class ServiceTagInformation(msrest.serialization.Model):
"""The service tag information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar properties: Properties of the service tag information.
:vartype properties:
~azure.mgmt.network.v2019_09_01.models.ServiceTagInformationPropertiesFormat
:ivar name: The name of service tag.
:vartype name: str
:ivar id: The ID of service tag.
:vartype id: str
"""
_validation = {
'properties': {'readonly': True},
'name': {'readonly': True},
'id': {'readonly': True},
}
_attribute_map = {
'properties': {'key': 'properties', 'type': 'ServiceTagInformationPropertiesFormat'},
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ServiceTagInformation, self).__init__(**kwargs)
self.properties = None
self.name = None
self.id = None
class ServiceTagInformationPropertiesFormat(msrest.serialization.Model):
"""Properties of the service tag information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar change_number: The iteration number of service tag.
:vartype change_number: str
:ivar region: The region of service tag.
:vartype region: str
:ivar system_service: The name of system service.
:vartype system_service: str
:ivar address_prefixes: The list of IP address prefixes.
:vartype address_prefixes: list[str]
"""
_validation = {
'change_number': {'readonly': True},
'region': {'readonly': True},
'system_service': {'readonly': True},
'address_prefixes': {'readonly': True},
}
_attribute_map = {
'change_number': {'key': 'changeNumber', 'type': 'str'},
'region': {'key': 'region', 'type': 'str'},
'system_service': {'key': 'systemService', 'type': 'str'},
'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(ServiceTagInformationPropertiesFormat, self).__init__(**kwargs)
self.change_number = None
self.region = None
self.system_service = None
self.address_prefixes = None
class ServiceTagsListResult(msrest.serialization.Model):
"""Response for the ListServiceTags API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: The name of the cloud.
:vartype name: str
:ivar id: The ID of the cloud.
:vartype id: str
:ivar type: The azure resource type.
:vartype type: str
:ivar change_number: The iteration number.
:vartype change_number: str
:ivar cloud: The name of the cloud.
:vartype cloud: str
:ivar values: The list of service tag information resources.
:vartype values: list[~azure.mgmt.network.v2019_09_01.models.ServiceTagInformation]
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'type': {'readonly': True},
'change_number': {'readonly': True},
'cloud': {'readonly': True},
'values': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'change_number': {'key': 'changeNumber', 'type': 'str'},
'cloud': {'key': 'cloud', 'type': 'str'},
'values': {'key': 'values', 'type': '[ServiceTagInformation]'},
}
def __init__(
self,
**kwargs
):
super(ServiceTagsListResult, self).__init__(**kwargs)
self.name = None
self.id = None
self.type = None
self.change_number = None
self.cloud = None
self.values = None
class SessionIds(msrest.serialization.Model):
"""List of session ids.
:param session_ids: List of session ids.
:type session_ids: list[str]
"""
_attribute_map = {
'session_ids': {'key': 'sessionIds', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(SessionIds, self).__init__(**kwargs)
self.session_ids = kwargs.get('session_ids', None)
class Subnet(SubResource):
"""Subnet in a virtual network resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param address_prefix: The address prefix for the subnet.
:type address_prefix: str
:param address_prefixes: List of address prefixes for the subnet.
:type address_prefixes: list[str]
    :param network_security_group: The reference to the NetworkSecurityGroup resource.
:type network_security_group: ~azure.mgmt.network.v2019_09_01.models.NetworkSecurityGroup
    :param route_table: The reference to the RouteTable resource.
:type route_table: ~azure.mgmt.network.v2019_09_01.models.RouteTable
:param nat_gateway: Nat gateway associated with this subnet.
:type nat_gateway: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param service_endpoints: An array of service endpoints.
:type service_endpoints:
list[~azure.mgmt.network.v2019_09_01.models.ServiceEndpointPropertiesFormat]
:param service_endpoint_policies: An array of service endpoint policies.
:type service_endpoint_policies:
list[~azure.mgmt.network.v2019_09_01.models.ServiceEndpointPolicy]
:ivar private_endpoints: An array of references to private endpoints.
:vartype private_endpoints: list[~azure.mgmt.network.v2019_09_01.models.PrivateEndpoint]
:ivar ip_configurations: An array of references to the network interface IP configurations
using subnet.
:vartype ip_configurations: list[~azure.mgmt.network.v2019_09_01.models.IPConfiguration]
:ivar ip_configuration_profiles: Array of IP configuration profiles which reference this
subnet.
:vartype ip_configuration_profiles:
list[~azure.mgmt.network.v2019_09_01.models.IPConfigurationProfile]
:ivar resource_navigation_links: An array of references to the external resources using subnet.
:vartype resource_navigation_links:
list[~azure.mgmt.network.v2019_09_01.models.ResourceNavigationLink]
:ivar service_association_links: An array of references to services injecting into this subnet.
:vartype service_association_links:
list[~azure.mgmt.network.v2019_09_01.models.ServiceAssociationLink]
:param delegations: An array of references to the delegations on the subnet.
:type delegations: list[~azure.mgmt.network.v2019_09_01.models.Delegation]
:ivar purpose: A read-only string identifying the intention of use for this subnet based on
delegations and other user-defined properties.
:vartype purpose: str
:ivar provisioning_state: The provisioning state of the subnet resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param private_endpoint_network_policies: Enable or disable applying network policies on
private endpoints in the subnet.
:type private_endpoint_network_policies: str
:param private_link_service_network_policies: Enable or disable applying network policies on
the private link service in the subnet.
:type private_link_service_network_policies: str
"""
_validation = {
'etag': {'readonly': True},
'private_endpoints': {'readonly': True},
'ip_configurations': {'readonly': True},
'ip_configuration_profiles': {'readonly': True},
'resource_navigation_links': {'readonly': True},
'service_association_links': {'readonly': True},
'purpose': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'address_prefixes': {'key': 'properties.addressPrefixes', 'type': '[str]'},
'network_security_group': {'key': 'properties.networkSecurityGroup', 'type': 'NetworkSecurityGroup'},
'route_table': {'key': 'properties.routeTable', 'type': 'RouteTable'},
'nat_gateway': {'key': 'properties.natGateway', 'type': 'SubResource'},
'service_endpoints': {'key': 'properties.serviceEndpoints', 'type': '[ServiceEndpointPropertiesFormat]'},
'service_endpoint_policies': {'key': 'properties.serviceEndpointPolicies', 'type': '[ServiceEndpointPolicy]'},
'private_endpoints': {'key': 'properties.privateEndpoints', 'type': '[PrivateEndpoint]'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[IPConfiguration]'},
'ip_configuration_profiles': {'key': 'properties.ipConfigurationProfiles', 'type': '[IPConfigurationProfile]'},
'resource_navigation_links': {'key': 'properties.resourceNavigationLinks', 'type': '[ResourceNavigationLink]'},
'service_association_links': {'key': 'properties.serviceAssociationLinks', 'type': '[ServiceAssociationLink]'},
'delegations': {'key': 'properties.delegations', 'type': '[Delegation]'},
'purpose': {'key': 'properties.purpose', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_endpoint_network_policies': {'key': 'properties.privateEndpointNetworkPolicies', 'type': 'str'},
'private_link_service_network_policies': {'key': 'properties.privateLinkServiceNetworkPolicies', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Subnet, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.address_prefix = kwargs.get('address_prefix', None)
self.address_prefixes = kwargs.get('address_prefixes', None)
self.network_security_group = kwargs.get('network_security_group', None)
self.route_table = kwargs.get('route_table', None)
self.nat_gateway = kwargs.get('nat_gateway', None)
self.service_endpoints = kwargs.get('service_endpoints', None)
self.service_endpoint_policies = kwargs.get('service_endpoint_policies', None)
self.private_endpoints = None
self.ip_configurations = None
self.ip_configuration_profiles = None
self.resource_navigation_links = None
self.service_association_links = None
self.delegations = kwargs.get('delegations', None)
self.purpose = None
self.provisioning_state = None
self.private_endpoint_network_policies = kwargs.get('private_endpoint_network_policies', None)
self.private_link_service_network_policies = kwargs.get('private_link_service_network_policies', None)
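# Illustrative usage sketch (not part of the generated SDK code): constructing a
# Subnet model purely from keyword arguments. The values below are placeholders,
# not defaults; server-populated attributes such as etag and purpose remain None.
#
#   >>> frontend_subnet = Subnet(
#   ...     name='frontend',
#   ...     address_prefix='10.0.0.0/24',
#   ...     private_endpoint_network_policies='Disabled',
#   ... )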
class SubnetAssociation(msrest.serialization.Model):
"""Subnet and it's custom security rules.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Subnet ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules: list[~azure.mgmt.network.v2019_09_01.models.SecurityRule]
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
}
def __init__(
self,
**kwargs
):
super(SubnetAssociation, self).__init__(**kwargs)
self.id = None
self.security_rules = kwargs.get('security_rules', None)
class SubnetListResult(msrest.serialization.Model):
"""Response for ListSubnets API service callRetrieves all subnet that belongs to a virtual network.
:param value: The subnets in a virtual network.
:type value: list[~azure.mgmt.network.v2019_09_01.models.Subnet]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Subnet]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SubnetListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class TagsObject(msrest.serialization.Model):
"""Tags object for patch operations.
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(TagsObject, self).__init__(**kwargs)
self.tags = kwargs.get('tags', None)
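# Illustrative usage sketch (not part of the generated SDK code): a TagsObject is the
# body of a tags PATCH operation; the tag names and values below are placeholders.
#
#   >>> tags_patch = TagsObject(tags={'environment': 'dev', 'costCenter': '1234'})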
class Topology(msrest.serialization.Model):
"""Topology of the specified resource group.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: GUID representing the operation id.
:vartype id: str
:ivar created_date_time: The datetime when the topology was initially created for the resource
group.
:vartype created_date_time: ~datetime.datetime
:ivar last_modified: The datetime when the topology was last modified.
:vartype last_modified: ~datetime.datetime
:param resources: A list of topology resources.
:type resources: list[~azure.mgmt.network.v2019_09_01.models.TopologyResource]
"""
_validation = {
'id': {'readonly': True},
'created_date_time': {'readonly': True},
'last_modified': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'resources': {'key': 'resources', 'type': '[TopologyResource]'},
}
def __init__(
self,
**kwargs
):
super(Topology, self).__init__(**kwargs)
self.id = None
self.created_date_time = None
self.last_modified = None
self.resources = kwargs.get('resources', None)
class TopologyAssociation(msrest.serialization.Model):
"""Resources that have an association with the parent resource.
:param name: The name of the resource that is associated with the parent resource.
:type name: str
:param resource_id: The ID of the resource that is associated with the parent resource.
:type resource_id: str
:param association_type: The association type of the child resource to the parent resource.
Possible values include: "Associated", "Contains".
:type association_type: str or ~azure.mgmt.network.v2019_09_01.models.AssociationType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'association_type': {'key': 'associationType', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TopologyAssociation, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.resource_id = kwargs.get('resource_id', None)
self.association_type = kwargs.get('association_type', None)
class TopologyParameters(msrest.serialization.Model):
"""Parameters that define the representation of topology.
:param target_resource_group_name: The name of the target resource group on which to perform
the topology operation.
:type target_resource_group_name: str
:param target_virtual_network: The reference of the Virtual Network resource.
:type target_virtual_network: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param target_subnet: The reference of the Subnet resource.
:type target_subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
"""
_attribute_map = {
'target_resource_group_name': {'key': 'targetResourceGroupName', 'type': 'str'},
'target_virtual_network': {'key': 'targetVirtualNetwork', 'type': 'SubResource'},
'target_subnet': {'key': 'targetSubnet', 'type': 'SubResource'},
}
def __init__(
self,
**kwargs
):
super(TopologyParameters, self).__init__(**kwargs)
self.target_resource_group_name = kwargs.get('target_resource_group_name', None)
self.target_virtual_network = kwargs.get('target_virtual_network', None)
self.target_subnet = kwargs.get('target_subnet', None)
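# Illustrative usage sketch (not part of the generated SDK code): scoping a topology
# request to a resource group and a virtual network. The names and resource ID are
# placeholders, and SubResource is assumed to accept an ``id`` keyword, as it does
# elsewhere in this module.
#
#   >>> topology_params = TopologyParameters(
#   ...     target_resource_group_name='my-resource-group',
#   ...     target_virtual_network=SubResource(id='<vnet-resource-id>'),
#   ... )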
class TopologyResource(msrest.serialization.Model):
"""The network resource topology information for the given resource group.
:param name: Name of the resource.
:type name: str
:param id: ID of the resource.
:type id: str
:param location: Resource location.
:type location: str
:param associations: Holds the associations the resource has with other resources in the
resource group.
:type associations: list[~azure.mgmt.network.v2019_09_01.models.TopologyAssociation]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'associations': {'key': 'associations', 'type': '[TopologyAssociation]'},
}
def __init__(
self,
**kwargs
):
super(TopologyResource, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.id = kwargs.get('id', None)
self.location = kwargs.get('location', None)
self.associations = kwargs.get('associations', None)
class TrafficAnalyticsConfigurationProperties(msrest.serialization.Model):
"""Parameters that define the configuration of traffic analytics.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Flag to enable/disable traffic analytics.
:type enabled: bool
:param workspace_id: The resource guid of the attached workspace.
:type workspace_id: str
:param workspace_region: The location of the attached workspace.
:type workspace_region: str
:param workspace_resource_id: Resource Id of the attached workspace.
:type workspace_resource_id: str
:param traffic_analytics_interval: The interval, in minutes, at which the traffic analytics
service performs flow analytics.
:type traffic_analytics_interval: int
"""
_validation = {
'enabled': {'required': True},
}
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
'workspace_id': {'key': 'workspaceId', 'type': 'str'},
'workspace_region': {'key': 'workspaceRegion', 'type': 'str'},
'workspace_resource_id': {'key': 'workspaceResourceId', 'type': 'str'},
'traffic_analytics_interval': {'key': 'trafficAnalyticsInterval', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(TrafficAnalyticsConfigurationProperties, self).__init__(**kwargs)
self.enabled = kwargs['enabled']
self.workspace_id = kwargs.get('workspace_id', None)
self.workspace_region = kwargs.get('workspace_region', None)
self.workspace_resource_id = kwargs.get('workspace_resource_id', None)
self.traffic_analytics_interval = kwargs.get('traffic_analytics_interval', None)
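# Illustrative usage sketch (not part of the generated SDK code): ``enabled`` is the
# only required keyword; omitting it raises KeyError in __init__ above. The workspace
# identifiers and interval are placeholders.
#
#   >>> ta_config = TrafficAnalyticsConfigurationProperties(
#   ...     enabled=True,
#   ...     workspace_id='<workspace-guid>',
#   ...     workspace_region='westus2',
#   ...     workspace_resource_id='<workspace-resource-id>',
#   ...     traffic_analytics_interval=10,
#   ... )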
class TrafficAnalyticsProperties(msrest.serialization.Model):
"""Parameters that define the configuration of traffic analytics.
All required parameters must be populated in order to send to Azure.
:param network_watcher_flow_analytics_configuration: Required. Parameters that define the
configuration of traffic analytics.
:type network_watcher_flow_analytics_configuration:
~azure.mgmt.network.v2019_09_01.models.TrafficAnalyticsConfigurationProperties
"""
_validation = {
'network_watcher_flow_analytics_configuration': {'required': True},
}
_attribute_map = {
'network_watcher_flow_analytics_configuration': {'key': 'networkWatcherFlowAnalyticsConfiguration', 'type': 'TrafficAnalyticsConfigurationProperties'},
}
def __init__(
self,
**kwargs
):
super(TrafficAnalyticsProperties, self).__init__(**kwargs)
self.network_watcher_flow_analytics_configuration = kwargs['network_watcher_flow_analytics_configuration']
class TrafficSelectorPolicy(msrest.serialization.Model):
"""An traffic selector policy for a virtual network gateway connection.
All required parameters must be populated in order to send to Azure.
:param local_address_ranges: Required. A collection of local address spaces in CIDR format.
:type local_address_ranges: list[str]
:param remote_address_ranges: Required. A collection of remote address spaces in CIDR format.
:type remote_address_ranges: list[str]
"""
_validation = {
'local_address_ranges': {'required': True},
'remote_address_ranges': {'required': True},
}
_attribute_map = {
'local_address_ranges': {'key': 'localAddressRanges', 'type': '[str]'},
'remote_address_ranges': {'key': 'remoteAddressRanges', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(TrafficSelectorPolicy, self).__init__(**kwargs)
self.local_address_ranges = kwargs['local_address_ranges']
self.remote_address_ranges = kwargs['remote_address_ranges']
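# Illustrative usage sketch (not part of the generated SDK code): both address range
# lists are required and hold CIDR strings; the prefixes below are placeholders.
#
#   >>> selector_policy = TrafficSelectorPolicy(
#   ...     local_address_ranges=['10.10.0.0/16'],
#   ...     remote_address_ranges=['192.168.0.0/16'],
#   ... )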
class TroubleshootingDetails(msrest.serialization.Model):
"""Information gained from troubleshooting of specified resource.
:param id: The id of the get troubleshoot operation.
:type id: str
:param reason_type: Reason type of failure.
:type reason_type: str
:param summary: A summary of troubleshooting.
:type summary: str
:param detail: Details on troubleshooting results.
:type detail: str
:param recommended_actions: List of recommended actions.
:type recommended_actions:
list[~azure.mgmt.network.v2019_09_01.models.TroubleshootingRecommendedActions]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'reason_type': {'key': 'reasonType', 'type': 'str'},
'summary': {'key': 'summary', 'type': 'str'},
'detail': {'key': 'detail', 'type': 'str'},
'recommended_actions': {'key': 'recommendedActions', 'type': '[TroubleshootingRecommendedActions]'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingDetails, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.reason_type = kwargs.get('reason_type', None)
self.summary = kwargs.get('summary', None)
self.detail = kwargs.get('detail', None)
self.recommended_actions = kwargs.get('recommended_actions', None)
class TroubleshootingParameters(msrest.serialization.Model):
"""Parameters that define the resource to troubleshoot.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The target resource to troubleshoot.
:type target_resource_id: str
:param storage_id: Required. The ID for the storage account to save the troubleshoot result.
:type storage_id: str
:param storage_path: Required. The path to the blob to save the troubleshoot result in.
:type storage_path: str
"""
_validation = {
'target_resource_id': {'required': True},
'storage_id': {'required': True},
'storage_path': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'storage_id': {'key': 'properties.storageId', 'type': 'str'},
'storage_path': {'key': 'properties.storagePath', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.storage_id = kwargs['storage_id']
self.storage_path = kwargs['storage_path']
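# Illustrative usage sketch (not part of the generated SDK code): all three keywords
# are required (a missing one raises KeyError). The resource IDs and blob container
# URL are placeholders.
#
#   >>> troubleshoot_params = TroubleshootingParameters(
#   ...     target_resource_id='<vpn-gateway-resource-id>',
#   ...     storage_id='<storage-account-resource-id>',
#   ...     storage_path='https://<account>.blob.core.windows.net/troubleshooting',
#   ... )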
class TroubleshootingRecommendedActions(msrest.serialization.Model):
"""Recommended actions based on discovered issues.
:param action_id: ID of the recommended action.
:type action_id: str
:param action_text: Description of recommended actions.
:type action_text: str
:param action_uri: The URI linking to documentation for the recommended troubleshooting
actions.
:type action_uri: str
:param action_uri_text: The information from the URI for the recommended troubleshooting
actions.
:type action_uri_text: str
"""
_attribute_map = {
'action_id': {'key': 'actionId', 'type': 'str'},
'action_text': {'key': 'actionText', 'type': 'str'},
'action_uri': {'key': 'actionUri', 'type': 'str'},
'action_uri_text': {'key': 'actionUriText', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingRecommendedActions, self).__init__(**kwargs)
self.action_id = kwargs.get('action_id', None)
self.action_text = kwargs.get('action_text', None)
self.action_uri = kwargs.get('action_uri', None)
self.action_uri_text = kwargs.get('action_uri_text', None)
class TroubleshootingResult(msrest.serialization.Model):
"""Troubleshooting information gained from specified resource.
:param start_time: The start time of the troubleshooting.
:type start_time: ~datetime.datetime
:param end_time: The end time of the troubleshooting.
:type end_time: ~datetime.datetime
:param code: The result code of the troubleshooting.
:type code: str
:param results: Information from troubleshooting.
:type results: list[~azure.mgmt.network.v2019_09_01.models.TroubleshootingDetails]
"""
_attribute_map = {
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'code': {'key': 'code', 'type': 'str'},
'results': {'key': 'results', 'type': '[TroubleshootingDetails]'},
}
def __init__(
self,
**kwargs
):
super(TroubleshootingResult, self).__init__(**kwargs)
self.start_time = kwargs.get('start_time', None)
self.end_time = kwargs.get('end_time', None)
self.code = kwargs.get('code', None)
self.results = kwargs.get('results', None)
class TunnelConnectionHealth(msrest.serialization.Model):
"""VirtualNetworkGatewayConnection properties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar tunnel: Tunnel name.
:vartype tunnel: str
:ivar connection_status: Virtual Network Gateway connection status. Possible values include:
"Unknown", "Connecting", "Connected", "NotConnected".
:vartype connection_status: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionStatus
:ivar ingress_bytes_transferred: The Ingress Bytes Transferred in this connection.
:vartype ingress_bytes_transferred: long
:ivar egress_bytes_transferred: The Egress Bytes Transferred in this connection.
:vartype egress_bytes_transferred: long
:ivar last_connection_established_utc_time: The time at which the connection was established,
in UTC format.
:vartype last_connection_established_utc_time: str
"""
_validation = {
'tunnel': {'readonly': True},
'connection_status': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'last_connection_established_utc_time': {'readonly': True},
}
_attribute_map = {
'tunnel': {'key': 'tunnel', 'type': 'str'},
'connection_status': {'key': 'connectionStatus', 'type': 'str'},
'ingress_bytes_transferred': {'key': 'ingressBytesTransferred', 'type': 'long'},
'egress_bytes_transferred': {'key': 'egressBytesTransferred', 'type': 'long'},
'last_connection_established_utc_time': {'key': 'lastConnectionEstablishedUtcTime', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(TunnelConnectionHealth, self).__init__(**kwargs)
self.tunnel = None
self.connection_status = None
self.ingress_bytes_transferred = None
self.egress_bytes_transferred = None
self.last_connection_established_utc_time = None
class UnprepareNetworkPoliciesRequest(msrest.serialization.Model):
"""Details of UnprepareNetworkPolicies for Subnet.
:param service_name: The name of the service for which the subnet is being unprepared.
:type service_name: str
"""
_attribute_map = {
'service_name': {'key': 'serviceName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UnprepareNetworkPoliciesRequest, self).__init__(**kwargs)
self.service_name = kwargs.get('service_name', None)
class Usage(msrest.serialization.Model):
"""Describes network resource usage.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource identifier.
:vartype id: str
:param unit: Required. An enum describing the unit of measurement. Possible values include:
"Count".
:type unit: str or ~azure.mgmt.network.v2019_09_01.models.UsageUnit
:param current_value: Required. The current value of the usage.
:type current_value: long
:param limit: Required. The limit of usage.
:type limit: long
:param name: Required. The name of the type of usage.
:type name: ~azure.mgmt.network.v2019_09_01.models.UsageName
"""
_validation = {
'id': {'readonly': True},
'unit': {'required': True},
'current_value': {'required': True},
'limit': {'required': True},
'name': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'current_value': {'key': 'currentValue', 'type': 'long'},
'limit': {'key': 'limit', 'type': 'long'},
'name': {'key': 'name', 'type': 'UsageName'},
}
def __init__(
self,
**kwargs
):
super(Usage, self).__init__(**kwargs)
self.id = None
self.unit = kwargs['unit']
self.current_value = kwargs['current_value']
self.limit = kwargs['limit']
self.name = kwargs['name']
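# Illustrative usage sketch (not part of the generated SDK code): Usage objects are
# normally returned by the service rather than built by callers, but the required
# keywords can be exercised directly. The quota numbers are placeholders; UsageName
# is defined immediately below.
#
#   >>> usage = Usage(
#   ...     unit='Count',
#   ...     current_value=5,
#   ...     limit=100,
#   ...     name=UsageName(value='PublicIPAddresses',
#   ...                    localized_value='Public IP Addresses'),
#   ... )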
class UsageName(msrest.serialization.Model):
"""The usage names.
:param value: A string describing the resource name.
:type value: str
:param localized_value: A localized string describing the resource name.
:type localized_value: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UsageName, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.localized_value = kwargs.get('localized_value', None)
class UsagesListResult(msrest.serialization.Model):
"""The list usages operation response.
:param value: The list network resource usages.
:type value: list[~azure.mgmt.network.v2019_09_01.models.Usage]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Usage]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(UsagesListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VerificationIPFlowParameters(msrest.serialization.Model):
"""Parameters that define the IP flow to be verified.
All required parameters must be populated in order to send to Azure.
:param target_resource_id: Required. The ID of the target resource to perform next-hop on.
:type target_resource_id: str
:param direction: Required. The direction of the packet represented as a 5-tuple. Possible
values include: "Inbound", "Outbound".
:type direction: str or ~azure.mgmt.network.v2019_09_01.models.Direction
:param protocol: Required. Protocol to be verified on. Possible values include: "TCP", "UDP".
:type protocol: str or ~azure.mgmt.network.v2019_09_01.models.IpFlowProtocol
:param local_port: Required. The local port. Acceptable values are a single integer in the
range 0-65535; '*' is also supported, depending on the direction.
:type local_port: str
:param remote_port: Required. The remote port. Acceptable values are a single integer in the
range 0-65535; '*' is also supported, depending on the direction.
:type remote_port: str
:param local_ip_address: Required. The local IP address. Acceptable values are valid IPv4
addresses.
:type local_ip_address: str
:param remote_ip_address: Required. The remote IP address. Acceptable values are valid IPv4
addresses.
:type remote_ip_address: str
:param target_nic_resource_id: The NIC ID. (If VM has multiple NICs and IP forwarding is
enabled on any of them, then this parameter must be specified. Otherwise optional).
:type target_nic_resource_id: str
"""
_validation = {
'target_resource_id': {'required': True},
'direction': {'required': True},
'protocol': {'required': True},
'local_port': {'required': True},
'remote_port': {'required': True},
'local_ip_address': {'required': True},
'remote_ip_address': {'required': True},
}
_attribute_map = {
'target_resource_id': {'key': 'targetResourceId', 'type': 'str'},
'direction': {'key': 'direction', 'type': 'str'},
'protocol': {'key': 'protocol', 'type': 'str'},
'local_port': {'key': 'localPort', 'type': 'str'},
'remote_port': {'key': 'remotePort', 'type': 'str'},
'local_ip_address': {'key': 'localIPAddress', 'type': 'str'},
'remote_ip_address': {'key': 'remoteIPAddress', 'type': 'str'},
'target_nic_resource_id': {'key': 'targetNicResourceId', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VerificationIPFlowParameters, self).__init__(**kwargs)
self.target_resource_id = kwargs['target_resource_id']
self.direction = kwargs['direction']
self.protocol = kwargs['protocol']
self.local_port = kwargs['local_port']
self.remote_port = kwargs['remote_port']
self.local_ip_address = kwargs['local_ip_address']
self.remote_ip_address = kwargs['remote_ip_address']
self.target_nic_resource_id = kwargs.get('target_nic_resource_id', None)
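# Illustrative usage sketch (not part of the generated SDK code): a request body for
# IP flow verification against a VM NIC. The resource ID and IP addresses are
# placeholders; ports are passed as strings per the attribute map above.
#
#   >>> ip_flow_params = VerificationIPFlowParameters(
#   ...     target_resource_id='<vm-resource-id>',
#   ...     direction='Outbound',
#   ...     protocol='TCP',
#   ...     local_port='49152',
#   ...     remote_port='443',
#   ...     local_ip_address='10.0.0.4',
#   ...     remote_ip_address='203.0.113.10',
#   ... )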
class VerificationIPFlowResult(msrest.serialization.Model):
"""Results of IP flow verification on the target resource.
:param access: Indicates whether the traffic is allowed or denied. Possible values include:
"Allow", "Deny".
:type access: str or ~azure.mgmt.network.v2019_09_01.models.Access
:param rule_name: Name of the rule. If input is not matched against any security rule, it is
not displayed.
:type rule_name: str
"""
_attribute_map = {
'access': {'key': 'access', 'type': 'str'},
'rule_name': {'key': 'ruleName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VerificationIPFlowResult, self).__init__(**kwargs)
self.access = kwargs.get('access', None)
self.rule_name = kwargs.get('rule_name', None)
class VirtualHub(Resource):
"""VirtualHub Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_wan: The VirtualWAN to which the VirtualHub belongs.
:type virtual_wan: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param vpn_gateway: The VpnGateway associated with this VirtualHub.
:type vpn_gateway: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param p2_s_vpn_gateway: The P2SVpnGateway associated with this VirtualHub.
:type p2_s_vpn_gateway: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param express_route_gateway: The expressRouteGateway associated with this VirtualHub.
:type express_route_gateway: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param azure_firewall: The azureFirewall associated with this VirtualHub.
:type azure_firewall: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param virtual_network_connections: List of all vnet connections with this VirtualHub.
:type virtual_network_connections:
list[~azure.mgmt.network.v2019_09_01.models.HubVirtualNetworkConnection]
:param address_prefix: Address-prefix for this VirtualHub.
:type address_prefix: str
:param route_table: The routeTable associated with this virtual hub.
:type route_table: ~azure.mgmt.network.v2019_09_01.models.VirtualHubRouteTable
:ivar provisioning_state: The provisioning state of the virtual hub resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param security_provider_name: The Security Provider name.
:type security_provider_name: str
:param virtual_hub_route_table_v2_s: List of all virtual hub route table v2s associated with
this VirtualHub.
:type virtual_hub_route_table_v2_s:
list[~azure.mgmt.network.v2019_09_01.models.VirtualHubRouteTableV2]
:param sku: The sku of this VirtualHub.
:type sku: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_wan': {'key': 'properties.virtualWan', 'type': 'SubResource'},
'vpn_gateway': {'key': 'properties.vpnGateway', 'type': 'SubResource'},
'p2_s_vpn_gateway': {'key': 'properties.p2SVpnGateway', 'type': 'SubResource'},
'express_route_gateway': {'key': 'properties.expressRouteGateway', 'type': 'SubResource'},
'azure_firewall': {'key': 'properties.azureFirewall', 'type': 'SubResource'},
'virtual_network_connections': {'key': 'properties.virtualNetworkConnections', 'type': '[HubVirtualNetworkConnection]'},
'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'},
'route_table': {'key': 'properties.routeTable', 'type': 'VirtualHubRouteTable'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'security_provider_name': {'key': 'properties.securityProviderName', 'type': 'str'},
'virtual_hub_route_table_v2_s': {'key': 'properties.virtualHubRouteTableV2s', 'type': '[VirtualHubRouteTableV2]'},
'sku': {'key': 'properties.sku', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualHub, self).__init__(**kwargs)
self.etag = None
self.virtual_wan = kwargs.get('virtual_wan', None)
self.vpn_gateway = kwargs.get('vpn_gateway', None)
self.p2_s_vpn_gateway = kwargs.get('p2_s_vpn_gateway', None)
self.express_route_gateway = kwargs.get('express_route_gateway', None)
self.azure_firewall = kwargs.get('azure_firewall', None)
self.virtual_network_connections = kwargs.get('virtual_network_connections', None)
self.address_prefix = kwargs.get('address_prefix', None)
self.route_table = kwargs.get('route_table', None)
self.provisioning_state = None
self.security_provider_name = kwargs.get('security_provider_name', None)
self.virtual_hub_route_table_v2_s = kwargs.get('virtual_hub_route_table_v2_s', None)
self.sku = kwargs.get('sku', None)
class VirtualHubId(msrest.serialization.Model):
"""Virtual Hub identifier.
:param id: The resource URI for the Virtual Hub where the ExpressRoute gateway is or will be
deployed. The Virtual Hub resource and the ExpressRoute gateway resource reside in the same
subscription.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubId, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
class VirtualHubRoute(msrest.serialization.Model):
"""VirtualHub route.
:param address_prefixes: List of all addressPrefixes.
:type address_prefixes: list[str]
:param next_hop_ip_address: The next hop IP address.
:type next_hop_ip_address: str
"""
_attribute_map = {
'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'},
'next_hop_ip_address': {'key': 'nextHopIpAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubRoute, self).__init__(**kwargs)
self.address_prefixes = kwargs.get('address_prefixes', None)
self.next_hop_ip_address = kwargs.get('next_hop_ip_address', None)
class VirtualHubRouteTable(msrest.serialization.Model):
"""VirtualHub route table.
:param routes: List of all routes.
:type routes: list[~azure.mgmt.network.v2019_09_01.models.VirtualHubRoute]
"""
_attribute_map = {
'routes': {'key': 'routes', 'type': '[VirtualHubRoute]'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubRouteTable, self).__init__(**kwargs)
self.routes = kwargs.get('routes', None)
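# Illustrative usage sketch (not part of the generated SDK code): a hub route table
# built from VirtualHubRoute entries; the prefixes and next-hop address are
# placeholders.
#
#   >>> hub_route_table = VirtualHubRouteTable(routes=[
#   ...     VirtualHubRoute(
#   ...         address_prefixes=['10.1.0.0/16'],
#   ...         next_hop_ip_address='10.0.0.5',
#   ...     ),
#   ... ])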
class VirtualHubRouteTableV2(SubResource):
"""VirtualHubRouteTableV2 Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param routes: List of all routes.
:type routes: list[~azure.mgmt.network.v2019_09_01.models.VirtualHubRouteV2]
:param attached_connections: List of all connections attached to this route table v2.
:type attached_connections: list[str]
:ivar provisioning_state: The provisioning state of the virtual hub route table v2 resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'routes': {'key': 'properties.routes', 'type': '[VirtualHubRouteV2]'},
'attached_connections': {'key': 'properties.attachedConnections', 'type': '[str]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubRouteTableV2, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.routes = kwargs.get('routes', None)
self.attached_connections = kwargs.get('attached_connections', None)
self.provisioning_state = None
class VirtualHubRouteV2(msrest.serialization.Model):
"""VirtualHubRouteTableV2 route.
:param destination_type: The type of destinations.
:type destination_type: str
:param destinations: List of all destinations.
:type destinations: list[str]
:param next_hop_type: The type of next hops.
:type next_hop_type: str
:param next_hops: List of next hop IP addresses.
:type next_hops: list[str]
"""
_attribute_map = {
'destination_type': {'key': 'destinationType', 'type': 'str'},
'destinations': {'key': 'destinations', 'type': '[str]'},
'next_hop_type': {'key': 'nextHopType', 'type': 'str'},
'next_hops': {'key': 'nextHops', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(VirtualHubRouteV2, self).__init__(**kwargs)
self.destination_type = kwargs.get('destination_type', None)
self.destinations = kwargs.get('destinations', None)
self.next_hop_type = kwargs.get('next_hop_type', None)
self.next_hops = kwargs.get('next_hops', None)
class VirtualNetwork(Resource):
"""Virtual Network resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param address_space: The AddressSpace that contains an array of IP address ranges that can be
used by subnets.
:type address_space: ~azure.mgmt.network.v2019_09_01.models.AddressSpace
:param dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs
deployed in the virtual network.
:type dhcp_options: ~azure.mgmt.network.v2019_09_01.models.DhcpOptions
:param subnets: A list of subnets in a Virtual Network.
:type subnets: list[~azure.mgmt.network.v2019_09_01.models.Subnet]
:param virtual_network_peerings: A list of peerings in a Virtual Network.
:type virtual_network_peerings:
list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkPeering]
:ivar resource_guid: The resourceGuid property of the Virtual Network resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the virtual network resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param enable_ddos_protection: Indicates if DDoS protection is enabled for all the protected
resources in the virtual network. It requires a DDoS protection plan associated with the
resource.
:type enable_ddos_protection: bool
:param enable_vm_protection: Indicates if VM protection is enabled for all the subnets in the
virtual network.
:type enable_vm_protection: bool
:param ddos_protection_plan: The DDoS protection plan associated with the virtual network.
:type ddos_protection_plan: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param bgp_communities: Bgp Communities sent over ExpressRoute with each route corresponding to
a prefix in this VNET.
:type bgp_communities: ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkBgpCommunities
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'address_space': {'key': 'properties.addressSpace', 'type': 'AddressSpace'},
'dhcp_options': {'key': 'properties.dhcpOptions', 'type': 'DhcpOptions'},
'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
'virtual_network_peerings': {'key': 'properties.virtualNetworkPeerings', 'type': '[VirtualNetworkPeering]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'enable_ddos_protection': {'key': 'properties.enableDdosProtection', 'type': 'bool'},
'enable_vm_protection': {'key': 'properties.enableVmProtection', 'type': 'bool'},
'ddos_protection_plan': {'key': 'properties.ddosProtectionPlan', 'type': 'SubResource'},
'bgp_communities': {'key': 'properties.bgpCommunities', 'type': 'VirtualNetworkBgpCommunities'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetwork, self).__init__(**kwargs)
self.etag = None
self.address_space = kwargs.get('address_space', None)
self.dhcp_options = kwargs.get('dhcp_options', None)
self.subnets = kwargs.get('subnets', None)
self.virtual_network_peerings = kwargs.get('virtual_network_peerings', None)
self.resource_guid = None
self.provisioning_state = None
self.enable_ddos_protection = kwargs.get('enable_ddos_protection', False)
self.enable_vm_protection = kwargs.get('enable_vm_protection', False)
self.ddos_protection_plan = kwargs.get('ddos_protection_plan', None)
self.bgp_communities = kwargs.get('bgp_communities', None)
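# Illustrative usage sketch (not part of the generated SDK code): a minimal virtual
# network with one subnet. AddressSpace is assumed to accept an ``address_prefixes``
# list, as referenced by the attribute map above; the location and prefixes are
# placeholders.
#
#   >>> vnet = VirtualNetwork(
#   ...     location='westus2',
#   ...     address_space=AddressSpace(address_prefixes=['10.0.0.0/16']),
#   ...     subnets=[Subnet(name='default', address_prefix='10.0.0.0/24')],
#   ... )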
class VirtualNetworkBgpCommunities(msrest.serialization.Model):
"""Bgp Communities sent over ExpressRoute with each route corresponding to a prefix in this VNET.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param virtual_network_community: Required. The BGP community associated with the virtual
network.
:type virtual_network_community: str
:ivar regional_community: The BGP community associated with the region of the virtual network.
:vartype regional_community: str
"""
_validation = {
'virtual_network_community': {'required': True},
'regional_community': {'readonly': True},
}
_attribute_map = {
'virtual_network_community': {'key': 'virtualNetworkCommunity', 'type': 'str'},
'regional_community': {'key': 'regionalCommunity', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkBgpCommunities, self).__init__(**kwargs)
self.virtual_network_community = kwargs['virtual_network_community']
self.regional_community = None
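# Illustrative usage sketch (not part of the generated SDK code): only the virtual
# network community can be set; the regional community is server-populated. The
# community value below is a placeholder.
#
#   >>> bgp_communities = VirtualNetworkBgpCommunities(
#   ...     virtual_network_community='12076:20000',
#   ... )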
class VirtualNetworkConnectionGatewayReference(msrest.serialization.Model):
"""A reference to VirtualNetworkGateway or LocalNetworkGateway resource.
All required parameters must be populated in order to send to Azure.
:param id: Required. The ID of VirtualNetworkGateway or LocalNetworkGateway resource.
:type id: str
"""
_validation = {
'id': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkConnectionGatewayReference, self).__init__(**kwargs)
self.id = kwargs['id']
class VirtualNetworkGateway(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param ip_configurations: IP configurations for virtual network gateway.
:type ip_configurations:
list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayIPConfiguration]
:param gateway_type: The type of this virtual network gateway. Possible values include: "Vpn",
"ExpressRoute".
:type gateway_type: str or ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayType
:param vpn_type: The type of this virtual network gateway. Possible values include:
"PolicyBased", "RouteBased".
:type vpn_type: str or ~azure.mgmt.network.v2019_09_01.models.VpnType
:param vpn_gateway_generation: The generation for this VirtualNetworkGateway. Must be None if
gatewayType is not VPN. Possible values include: "None", "Generation1", "Generation2".
:type vpn_gateway_generation: str or
~azure.mgmt.network.v2019_09_01.models.VpnGatewayGeneration
:param enable_bgp: Whether BGP is enabled for this virtual network gateway or not.
:type enable_bgp: bool
:param active: ActiveActive flag.
:type active: bool
:param gateway_default_site: The reference to the LocalNetworkGateway resource which represents
the local network site having default routes. Assign a null value to remove an existing
default site setting.
:type gateway_default_site: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param sku: The reference of the VirtualNetworkGatewaySku resource which represents the SKU
selected for Virtual network gateway.
:type sku: ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewaySku
:param vpn_client_configuration: The reference of the VpnClientConfiguration resource which
represents the P2S VpnClient configurations.
:type vpn_client_configuration: ~azure.mgmt.network.v2019_09_01.models.VpnClientConfiguration
:param bgp_settings: Virtual network gateway's BGP speaker settings.
:type bgp_settings: ~azure.mgmt.network.v2019_09_01.models.BgpSettings
:param custom_routes: The reference of the address space resource which represents the custom
routes address space specified by the customer for virtual network gateway and VpnClient.
:type custom_routes: ~azure.mgmt.network.v2019_09_01.models.AddressSpace
:ivar resource_guid: The resource GUID property of the virtual network gateway resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the virtual network gateway resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param enable_dns_forwarding: Whether dns forwarding is enabled or not.
:type enable_dns_forwarding: bool
:ivar inbound_dns_forwarding_endpoint: The IP address allocated by the gateway to which dns
requests can be sent.
:vartype inbound_dns_forwarding_endpoint: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
'inbound_dns_forwarding_endpoint': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[VirtualNetworkGatewayIPConfiguration]'},
'gateway_type': {'key': 'properties.gatewayType', 'type': 'str'},
'vpn_type': {'key': 'properties.vpnType', 'type': 'str'},
'vpn_gateway_generation': {'key': 'properties.vpnGatewayGeneration', 'type': 'str'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'active': {'key': 'properties.activeActive', 'type': 'bool'},
'gateway_default_site': {'key': 'properties.gatewayDefaultSite', 'type': 'SubResource'},
'sku': {'key': 'properties.sku', 'type': 'VirtualNetworkGatewaySku'},
'vpn_client_configuration': {'key': 'properties.vpnClientConfiguration', 'type': 'VpnClientConfiguration'},
'bgp_settings': {'key': 'properties.bgpSettings', 'type': 'BgpSettings'},
'custom_routes': {'key': 'properties.customRoutes', 'type': 'AddressSpace'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'enable_dns_forwarding': {'key': 'properties.enableDnsForwarding', 'type': 'bool'},
'inbound_dns_forwarding_endpoint': {'key': 'properties.inboundDnsForwardingEndpoint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGateway, self).__init__(**kwargs)
self.etag = None
self.ip_configurations = kwargs.get('ip_configurations', None)
self.gateway_type = kwargs.get('gateway_type', None)
self.vpn_type = kwargs.get('vpn_type', None)
self.vpn_gateway_generation = kwargs.get('vpn_gateway_generation', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.active = kwargs.get('active', None)
self.gateway_default_site = kwargs.get('gateway_default_site', None)
self.sku = kwargs.get('sku', None)
self.vpn_client_configuration = kwargs.get('vpn_client_configuration', None)
self.bgp_settings = kwargs.get('bgp_settings', None)
self.custom_routes = kwargs.get('custom_routes', None)
self.resource_guid = None
self.provisioning_state = None
self.enable_dns_forwarding = kwargs.get('enable_dns_forwarding', None)
self.inbound_dns_forwarding_endpoint = None
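# Illustrative usage sketch (not part of the generated SDK code): a route-based VPN
# gateway. VirtualNetworkGatewaySku and VirtualNetworkGatewayIPConfiguration are
# assumed to be the models referenced by the attribute map above; the SKU name and
# location are placeholders, and ip_configurations is left for the caller to fill in.
#
#   >>> vnet_gateway = VirtualNetworkGateway(
#   ...     location='westus2',
#   ...     gateway_type='Vpn',
#   ...     vpn_type='RouteBased',
#   ...     enable_bgp=False,
#   ...     sku=VirtualNetworkGatewaySku(name='VpnGw1', tier='VpnGw1'),
#   ...     ip_configurations=[],  # list of VirtualNetworkGatewayIPConfiguration
#   ... )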
class VirtualNetworkGatewayConnection(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param authorization_key: The authorizationKey.
:type authorization_key: str
:param virtual_network_gateway1: Required. The reference to virtual network gateway resource.
:type virtual_network_gateway1: ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGateway
:param virtual_network_gateway2: The reference to virtual network gateway resource.
:type virtual_network_gateway2: ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGateway
:param local_network_gateway2: The reference to local network gateway resource.
:type local_network_gateway2: ~azure.mgmt.network.v2019_09_01.models.LocalNetworkGateway
:param connection_type: Required. Gateway connection type. Possible values include: "IPsec",
"Vnet2Vnet", "ExpressRoute", "VPNClient".
:type connection_type: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionType
:param connection_protocol: Connection protocol used for this connection. Possible values
include: "IKEv2", "IKEv1".
:type connection_protocol: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionProtocol
:param routing_weight: The routing weight.
:type routing_weight: int
:param shared_key: The IPSec shared key.
:type shared_key: str
:ivar connection_status: Virtual Network Gateway connection status. Possible values include:
"Unknown", "Connecting", "Connected", "NotConnected".
:vartype connection_status: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionStatus
:ivar tunnel_connection_status: Collection of all tunnels' connection health status.
:vartype tunnel_connection_status:
list[~azure.mgmt.network.v2019_09_01.models.TunnelConnectionHealth]
:ivar egress_bytes_transferred: The egress bytes transferred in this connection.
:vartype egress_bytes_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes transferred in this connection.
:vartype ingress_bytes_transferred: long
:param peer: The reference to peerings resource.
:type peer: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param enable_bgp: EnableBgp flag.
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this connection.
:type ipsec_policies: list[~azure.mgmt.network.v2019_09_01.models.IpsecPolicy]
:param traffic_selector_policies: The Traffic Selector Policies to be considered by this
connection.
:type traffic_selector_policies:
list[~azure.mgmt.network.v2019_09_01.models.TrafficSelectorPolicy]
:ivar resource_guid: The resource GUID property of the virtual network gateway connection
resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the virtual network gateway connection
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
:type express_route_gateway_bypass: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'virtual_network_gateway1': {'required': True},
'connection_type': {'required': True},
'connection_status': {'readonly': True},
'tunnel_connection_status': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'virtual_network_gateway1': {'key': 'properties.virtualNetworkGateway1', 'type': 'VirtualNetworkGateway'},
'virtual_network_gateway2': {'key': 'properties.virtualNetworkGateway2', 'type': 'VirtualNetworkGateway'},
'local_network_gateway2': {'key': 'properties.localNetworkGateway2', 'type': 'LocalNetworkGateway'},
'connection_type': {'key': 'properties.connectionType', 'type': 'str'},
'connection_protocol': {'key': 'properties.connectionProtocol', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'tunnel_connection_status': {'key': 'properties.tunnelConnectionStatus', 'type': '[TunnelConnectionHealth]'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'peer': {'key': 'properties.peer', 'type': 'SubResource'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'traffic_selector_policies': {'key': 'properties.trafficSelectorPolicies', 'type': '[TrafficSelectorPolicy]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'express_route_gateway_bypass': {'key': 'properties.expressRouteGatewayBypass', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayConnection, self).__init__(**kwargs)
self.etag = None
self.authorization_key = kwargs.get('authorization_key', None)
self.virtual_network_gateway1 = kwargs['virtual_network_gateway1']
self.virtual_network_gateway2 = kwargs.get('virtual_network_gateway2', None)
self.local_network_gateway2 = kwargs.get('local_network_gateway2', None)
self.connection_type = kwargs['connection_type']
self.connection_protocol = kwargs.get('connection_protocol', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.shared_key = kwargs.get('shared_key', None)
self.connection_status = None
self.tunnel_connection_status = None
self.egress_bytes_transferred = None
self.ingress_bytes_transferred = None
self.peer = kwargs.get('peer', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.traffic_selector_policies = kwargs.get('traffic_selector_policies', None)
self.resource_guid = None
self.provisioning_state = None
self.express_route_gateway_bypass = kwargs.get('express_route_gateway_bypass', None)
class VirtualNetworkGatewayConnectionListEntity(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param authorization_key: The authorizationKey.
:type authorization_key: str
:param virtual_network_gateway1: Required. The reference to virtual network gateway resource.
:type virtual_network_gateway1:
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkConnectionGatewayReference
:param virtual_network_gateway2: The reference to virtual network gateway resource.
:type virtual_network_gateway2:
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkConnectionGatewayReference
:param local_network_gateway2: The reference to local network gateway resource.
:type local_network_gateway2:
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkConnectionGatewayReference
:param connection_type: Required. Gateway connection type. Possible values include: "IPsec",
"Vnet2Vnet", "ExpressRoute", "VPNClient".
:type connection_type: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionType
:param connection_protocol: Connection protocol used for this connection. Possible values
include: "IKEv2", "IKEv1".
:type connection_protocol: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionProtocol
:param routing_weight: The routing weight.
:type routing_weight: int
:param shared_key: The IPSec shared key.
:type shared_key: str
:ivar connection_status: Virtual Network Gateway connection status. Possible values include:
"Unknown", "Connecting", "Connected", "NotConnected".
:vartype connection_status: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionStatus
:ivar tunnel_connection_status: Collection of all tunnels' connection health status.
:vartype tunnel_connection_status:
list[~azure.mgmt.network.v2019_09_01.models.TunnelConnectionHealth]
:ivar egress_bytes_transferred: The egress bytes transferred in this connection.
:vartype egress_bytes_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes transferred in this connection.
:vartype ingress_bytes_transferred: long
:param peer: The reference to peerings resource.
:type peer: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param enable_bgp: EnableBgp flag.
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this connection.
:type ipsec_policies: list[~azure.mgmt.network.v2019_09_01.models.IpsecPolicy]
:param traffic_selector_policies: The Traffic Selector Policies to be considered by this
connection.
:type traffic_selector_policies:
list[~azure.mgmt.network.v2019_09_01.models.TrafficSelectorPolicy]
:ivar resource_guid: The resource GUID property of the virtual network gateway connection
resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the virtual network gateway connection
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param express_route_gateway_bypass: Bypass ExpressRoute Gateway for data forwarding.
:type express_route_gateway_bypass: bool
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'virtual_network_gateway1': {'required': True},
'connection_type': {'required': True},
'connection_status': {'readonly': True},
'tunnel_connection_status': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'virtual_network_gateway1': {'key': 'properties.virtualNetworkGateway1', 'type': 'VirtualNetworkConnectionGatewayReference'},
'virtual_network_gateway2': {'key': 'properties.virtualNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'local_network_gateway2': {'key': 'properties.localNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'connection_type': {'key': 'properties.connectionType', 'type': 'str'},
'connection_protocol': {'key': 'properties.connectionProtocol', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'tunnel_connection_status': {'key': 'properties.tunnelConnectionStatus', 'type': '[TunnelConnectionHealth]'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'peer': {'key': 'properties.peer', 'type': 'SubResource'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'traffic_selector_policies': {'key': 'properties.trafficSelectorPolicies', 'type': '[TrafficSelectorPolicy]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'express_route_gateway_bypass': {'key': 'properties.expressRouteGatewayBypass', 'type': 'bool'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayConnectionListEntity, self).__init__(**kwargs)
self.etag = None
self.authorization_key = kwargs.get('authorization_key', None)
self.virtual_network_gateway1 = kwargs['virtual_network_gateway1']
self.virtual_network_gateway2 = kwargs.get('virtual_network_gateway2', None)
self.local_network_gateway2 = kwargs.get('local_network_gateway2', None)
self.connection_type = kwargs['connection_type']
self.connection_protocol = kwargs.get('connection_protocol', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.shared_key = kwargs.get('shared_key', None)
self.connection_status = None
self.tunnel_connection_status = None
self.egress_bytes_transferred = None
self.ingress_bytes_transferred = None
self.peer = kwargs.get('peer', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.traffic_selector_policies = kwargs.get('traffic_selector_policies', None)
self.resource_guid = None
self.provisioning_state = None
self.express_route_gateway_bypass = kwargs.get('express_route_gateway_bypass', None)
class VirtualNetworkGatewayConnectionListResult(msrest.serialization.Model):
"""Response for the ListVirtualNetworkGatewayConnections API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of VirtualNetworkGatewayConnection resources that exists in a resource
group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnection]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkGatewayConnection]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayConnectionListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class VirtualNetworkGatewayIPConfiguration(SubResource):
"""IP configuration for virtual network gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param private_ip_allocation_method: The private IP address allocation method. Possible values
include: "Static", "Dynamic".
:type private_ip_allocation_method: str or
~azure.mgmt.network.v2019_09_01.models.IPAllocationMethod
:param subnet: The reference of the subnet resource.
:type subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param public_ip_address: The reference of the public IP resource.
:type public_ip_address: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar provisioning_state: The provisioning state of the virtual network gateway IP
configuration resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
'subnet': {'key': 'properties.subnet', 'type': 'SubResource'},
'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayIPConfiguration, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.private_ip_allocation_method = kwargs.get('private_ip_allocation_method', None)
self.subnet = kwargs.get('subnet', None)
self.public_ip_address = kwargs.get('public_ip_address', None)
self.provisioning_state = None
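# Illustrative sketch (not part of the generated client): builds a gateway IP
# configuration from SubResource references. The resource IDs below and the
# helper name are hypothetical placeholders, not values from this module.
def _example_gateway_ip_configuration():
    """Return a sample VirtualNetworkGatewayIPConfiguration."""
    return VirtualNetworkGatewayIPConfiguration(
        name='gwipconfig1',
        private_ip_allocation_method='Dynamic',
        # The GatewaySubnet and the public IP are referenced by ID only.
        subnet=SubResource(id='/subscriptions/.../subnets/GatewaySubnet'),
        public_ip_address=SubResource(id='/subscriptions/.../publicIPAddresses/gw-pip'),
    )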
class VirtualNetworkGatewayListConnectionsResult(msrest.serialization.Model):
"""Response for the VirtualNetworkGatewayListConnections API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of VirtualNetworkGatewayConnection resources that exists in a resource
group.
:type value:
list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionListEntity]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkGatewayConnectionListEntity]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayListConnectionsResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class VirtualNetworkGatewayListResult(msrest.serialization.Model):
"""Response for the ListVirtualNetworkGateways API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: A list of VirtualNetworkGateway resources that exists in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGateway]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkGateway]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewayListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class VirtualNetworkGatewaySku(msrest.serialization.Model):
"""VirtualNetworkGatewaySku details.
Variables are only populated by the server, and will be ignored when sending a request.
:param name: Gateway SKU name. Possible values include: "Basic", "HighPerformance", "Standard",
"UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw4", "VpnGw5", "VpnGw1AZ", "VpnGw2AZ",
"VpnGw3AZ", "VpnGw4AZ", "VpnGw5AZ", "ErGw1AZ", "ErGw2AZ", "ErGw3AZ".
:type name: str or ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewaySkuName
:param tier: Gateway SKU tier. Possible values include: "Basic", "HighPerformance", "Standard",
"UltraPerformance", "VpnGw1", "VpnGw2", "VpnGw3", "VpnGw4", "VpnGw5", "VpnGw1AZ", "VpnGw2AZ",
"VpnGw3AZ", "VpnGw4AZ", "VpnGw5AZ", "ErGw1AZ", "ErGw2AZ", "ErGw3AZ".
:type tier: str or ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewaySkuTier
:ivar capacity: The capacity.
:vartype capacity: int
"""
_validation = {
'capacity': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
'capacity': {'key': 'capacity', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkGatewaySku, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.tier = kwargs.get('tier', None)
self.capacity = None
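# Illustrative sketch (not part of the generated client): the SKU name and
# tier are normally set to the same value; 'VpnGw1' is one of the allowed
# strings listed in the docstring above. ``capacity`` is read-only and is
# populated by the server.
def _example_gateway_sku():
    """Return a sample VpnGw1 SKU."""
    return VirtualNetworkGatewaySku(name='VpnGw1', tier='VpnGw1')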
class VirtualNetworkListResult(msrest.serialization.Model):
"""Response for the ListVirtualNetworks API service call.
:param value: A list of VirtualNetwork resources in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualNetwork]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetwork]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkListUsageResult(msrest.serialization.Model):
"""Response for the virtual networks GetUsage API service call.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: VirtualNetwork usage stats.
:vartype value: list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkUsage]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkUsage]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkListUsageResult, self).__init__(**kwargs)
self.value = None
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkPeering(SubResource):
"""Peerings in a virtual network resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param allow_virtual_network_access: Whether the VMs in the local virtual network space would
be able to access the VMs in remote virtual network space.
:type allow_virtual_network_access: bool
:param allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the local virtual
network will be allowed/disallowed in remote virtual network.
:type allow_forwarded_traffic: bool
:param allow_gateway_transit: If gateway links can be used in remote virtual networking to link
to this virtual network.
:type allow_gateway_transit: bool
:param use_remote_gateways: If remote gateways can be used on this virtual network. If the flag
is set to true, and allowGatewayTransit on remote peering is also true, virtual network will
use gateways of remote virtual network for transit. Only one peering can have this flag set to
true. This flag cannot be set if virtual network already has a gateway.
:type use_remote_gateways: bool
:param remote_virtual_network: The reference of the remote virtual network. The remote virtual
network can be in the same or different region (preview). See here to register for the preview
and learn more (https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-
peering).
:type remote_virtual_network: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param remote_address_space: The reference of the remote virtual network address space.
:type remote_address_space: ~azure.mgmt.network.v2019_09_01.models.AddressSpace
:param peering_state: The status of the virtual network peering. Possible values include:
"Initiated", "Connected", "Disconnected".
:type peering_state: str or ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkPeeringState
:ivar provisioning_state: The provisioning state of the virtual network peering resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'},
'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkPeering, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.allow_virtual_network_access = kwargs.get('allow_virtual_network_access', None)
self.allow_forwarded_traffic = kwargs.get('allow_forwarded_traffic', None)
self.allow_gateway_transit = kwargs.get('allow_gateway_transit', None)
self.use_remote_gateways = kwargs.get('use_remote_gateways', None)
self.remote_virtual_network = kwargs.get('remote_virtual_network', None)
self.remote_address_space = kwargs.get('remote_address_space', None)
self.peering_state = kwargs.get('peering_state', None)
self.provisioning_state = None
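# Illustrative sketch (not part of the generated client): a peering that
# allows traffic from the remote virtual network and uses its gateways for
# transit. The remote VNet ID is a placeholder; per the docstring above, only
# one peering on a VNet may set ``use_remote_gateways`` to True.
def _example_vnet_peering():
    """Return a sample VirtualNetworkPeering."""
    return VirtualNetworkPeering(
        name='peer-to-hub',
        allow_virtual_network_access=True,
        allow_forwarded_traffic=True,
        allow_gateway_transit=False,
        use_remote_gateways=True,
        remote_virtual_network=SubResource(id='/subscriptions/.../virtualNetworks/hub-vnet'),
    )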
class VirtualNetworkPeeringListResult(msrest.serialization.Model):
"""Response for ListSubnets API service call. Retrieves all subnets that belong to a virtual network.
:param value: The peerings in a virtual network.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkPeering]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkPeering]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkPeeringListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkTap(Resource):
"""Virtual Network Tap resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar network_interface_tap_configurations: Specifies the list of resource IDs for the network
interface IP configuration that needs to be tapped.
:vartype network_interface_tap_configurations:
list[~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceTapConfiguration]
:ivar resource_guid: The resource GUID property of the virtual network tap resource.
:vartype resource_guid: str
:ivar provisioning_state: The provisioning state of the virtual network tap resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param destination_network_interface_ip_configuration: The reference to the private IP Address
of the collector nic that will receive the tap.
:type destination_network_interface_ip_configuration:
~azure.mgmt.network.v2019_09_01.models.NetworkInterfaceIPConfiguration
:param destination_load_balancer_front_end_ip_configuration: The reference to the private IP
address on the internal Load Balancer that will receive the tap.
:type destination_load_balancer_front_end_ip_configuration:
~azure.mgmt.network.v2019_09_01.models.FrontendIPConfiguration
:param destination_port: The VXLAN destination port that will receive the tapped traffic.
:type destination_port: int
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'network_interface_tap_configurations': {'readonly': True},
'resource_guid': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'network_interface_tap_configurations': {'key': 'properties.networkInterfaceTapConfigurations', 'type': '[NetworkInterfaceTapConfiguration]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'destination_network_interface_ip_configuration': {'key': 'properties.destinationNetworkInterfaceIPConfiguration', 'type': 'NetworkInterfaceIPConfiguration'},
'destination_load_balancer_front_end_ip_configuration': {'key': 'properties.destinationLoadBalancerFrontEndIPConfiguration', 'type': 'FrontendIPConfiguration'},
'destination_port': {'key': 'properties.destinationPort', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkTap, self).__init__(**kwargs)
self.etag = None
self.network_interface_tap_configurations = None
self.resource_guid = None
self.provisioning_state = None
self.destination_network_interface_ip_configuration = kwargs.get('destination_network_interface_ip_configuration', None)
self.destination_load_balancer_front_end_ip_configuration = kwargs.get('destination_load_balancer_front_end_ip_configuration', None)
self.destination_port = kwargs.get('destination_port', None)
class VirtualNetworkTapListResult(msrest.serialization.Model):
"""Response for ListVirtualNetworkTap API service call.
:param value: A list of VirtualNetworkTaps in a resource group.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualNetworkTap]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualNetworkTap]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkTapListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualNetworkUsage(msrest.serialization.Model):
"""Usage details for subnet.
Variables are only populated by the server, and will be ignored when sending a request.
    :ivar current_value: Indicates the number of IPs used from the subnet.
:vartype current_value: float
:ivar id: Subnet identifier.
:vartype id: str
:ivar limit: Indicates the size of the subnet.
:vartype limit: float
:ivar name: The name containing common and localized value for usage.
:vartype name: ~azure.mgmt.network.v2019_09_01.models.VirtualNetworkUsageName
:ivar unit: Usage units. Returns 'Count'.
:vartype unit: str
"""
_validation = {
'current_value': {'readonly': True},
'id': {'readonly': True},
'limit': {'readonly': True},
'name': {'readonly': True},
'unit': {'readonly': True},
}
_attribute_map = {
'current_value': {'key': 'currentValue', 'type': 'float'},
'id': {'key': 'id', 'type': 'str'},
'limit': {'key': 'limit', 'type': 'float'},
'name': {'key': 'name', 'type': 'VirtualNetworkUsageName'},
'unit': {'key': 'unit', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkUsage, self).__init__(**kwargs)
self.current_value = None
self.id = None
self.limit = None
self.name = None
self.unit = None
class VirtualNetworkUsageName(msrest.serialization.Model):
"""Usage strings container.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar localized_value: Localized subnet size and usage string.
:vartype localized_value: str
:ivar value: Subnet size and usage string.
:vartype value: str
"""
_validation = {
'localized_value': {'readonly': True},
'value': {'readonly': True},
}
_attribute_map = {
'localized_value': {'key': 'localizedValue', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualNetworkUsageName, self).__init__(**kwargs)
self.localized_value = None
self.value = None
class VirtualRouter(Resource):
"""VirtualRouter Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_router_asn: VirtualRouter ASN.
:type virtual_router_asn: long
:param virtual_router_ips: VirtualRouter IPs.
:type virtual_router_ips: list[str]
:param hosted_subnet: The Subnet on which VirtualRouter is hosted.
:type hosted_subnet: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param hosted_gateway: The Gateway on which VirtualRouter is hosted.
:type hosted_gateway: ~azure.mgmt.network.v2019_09_01.models.SubResource
:ivar peerings: List of references to VirtualRouterPeerings.
:vartype peerings: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'virtual_router_asn': {'maximum': 4294967295, 'minimum': 0},
'peerings': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_router_asn': {'key': 'properties.virtualRouterAsn', 'type': 'long'},
'virtual_router_ips': {'key': 'properties.virtualRouterIps', 'type': '[str]'},
'hosted_subnet': {'key': 'properties.hostedSubnet', 'type': 'SubResource'},
'hosted_gateway': {'key': 'properties.hostedGateway', 'type': 'SubResource'},
'peerings': {'key': 'properties.peerings', 'type': '[SubResource]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualRouter, self).__init__(**kwargs)
self.etag = None
self.virtual_router_asn = kwargs.get('virtual_router_asn', None)
self.virtual_router_ips = kwargs.get('virtual_router_ips', None)
self.hosted_subnet = kwargs.get('hosted_subnet', None)
self.hosted_gateway = kwargs.get('hosted_gateway', None)
self.peerings = None
self.provisioning_state = None
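# Illustrative sketch (not part of the generated client): a VirtualRouter
# hosted on a subnet. The validation table above constrains
# ``virtual_router_asn`` to the 32-bit range 0..4294967295; the ASN, region,
# and subnet ID below are placeholders.
def _example_virtual_router():
    """Return a sample VirtualRouter hosted on a subnet."""
    return VirtualRouter(
        location='westus2',
        virtual_router_asn=65001,
        hosted_subnet=SubResource(id='/subscriptions/.../subnets/RouteServerSubnet'),
    )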
class VirtualRouterListResult(msrest.serialization.Model):
"""Response for ListVirtualRouters API service call.
:param value: List of Virtual Routers.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualRouter]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualRouter]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualRouterListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualRouterPeering(SubResource):
"""Virtual Router Peering resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the virtual router peering that is unique within a virtual router.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Peering type.
:vartype type: str
:param peer_asn: Peer ASN.
:type peer_asn: long
:param peer_ip: Peer IP.
:type peer_ip: str
:ivar provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'peer_asn': {'maximum': 4294967295, 'minimum': 0},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'peer_asn': {'key': 'properties.peerAsn', 'type': 'long'},
'peer_ip': {'key': 'properties.peerIp', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualRouterPeering, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.peer_asn = kwargs.get('peer_asn', None)
self.peer_ip = kwargs.get('peer_ip', None)
self.provisioning_state = None
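# Illustrative sketch (not part of the generated client): a BGP peering
# attached to a VirtualRouter. ``peer_asn`` is subject to the 0..4294967295
# bound in the validation table above; the ASN and IP are placeholders.
def _example_virtual_router_peering():
    """Return a sample VirtualRouterPeering."""
    return VirtualRouterPeering(name='nva-peering', peer_asn=65002, peer_ip='10.0.1.4')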
class VirtualRouterPeeringListResult(msrest.serialization.Model):
"""Response for ListVirtualRouterPeerings API service call.
:param value: List of VirtualRouterPeerings in a VirtualRouter.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VirtualRouterPeering]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VirtualRouterPeering]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualRouterPeeringListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
class VirtualWAN(Resource):
"""VirtualWAN Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
    :param disable_vpn_encryption: Whether VPN encryption is disabled for this VirtualWAN.
:type disable_vpn_encryption: bool
:ivar virtual_hubs: List of VirtualHubs in the VirtualWAN.
:vartype virtual_hubs: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar vpn_sites: List of VpnSites in the VirtualWAN.
:vartype vpn_sites: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:param allow_branch_to_branch_traffic: True if branch to branch traffic is allowed.
:type allow_branch_to_branch_traffic: bool
:param allow_vnet_to_vnet_traffic: True if Vnet to Vnet traffic is allowed.
:type allow_vnet_to_vnet_traffic: bool
:ivar office365_local_breakout_category: The office local breakout category. Possible values
include: "Optimize", "OptimizeAndAllow", "All", "None".
:vartype office365_local_breakout_category: str or
~azure.mgmt.network.v2019_09_01.models.OfficeTrafficCategory
:ivar provisioning_state: The provisioning state of the virtual WAN resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param type_properties_type: The type of the VirtualWAN.
:type type_properties_type: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'virtual_hubs': {'readonly': True},
'vpn_sites': {'readonly': True},
'office365_local_breakout_category': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'disable_vpn_encryption': {'key': 'properties.disableVpnEncryption', 'type': 'bool'},
'virtual_hubs': {'key': 'properties.virtualHubs', 'type': '[SubResource]'},
'vpn_sites': {'key': 'properties.vpnSites', 'type': '[SubResource]'},
'allow_branch_to_branch_traffic': {'key': 'properties.allowBranchToBranchTraffic', 'type': 'bool'},
'allow_vnet_to_vnet_traffic': {'key': 'properties.allowVnetToVnetTraffic', 'type': 'bool'},
'office365_local_breakout_category': {'key': 'properties.office365LocalBreakoutCategory', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualWAN, self).__init__(**kwargs)
self.etag = None
self.disable_vpn_encryption = kwargs.get('disable_vpn_encryption', None)
self.virtual_hubs = None
self.vpn_sites = None
self.allow_branch_to_branch_traffic = kwargs.get('allow_branch_to_branch_traffic', None)
self.allow_vnet_to_vnet_traffic = kwargs.get('allow_vnet_to_vnet_traffic', None)
self.office365_local_breakout_category = None
self.provisioning_state = None
self.type_properties_type = kwargs.get('type_properties_type', None)
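# Illustrative sketch (not part of the generated client): a Standard
# VirtualWAN with VPN encryption enabled and branch-to-branch traffic
# allowed. The hub and site collections are read-only and are populated by
# the server; the location and tags are placeholders.
def _example_virtual_wan():
    """Return a sample VirtualWAN resource."""
    return VirtualWAN(
        location='westeurope',
        tags={'env': 'demo'},
        disable_vpn_encryption=False,
        allow_branch_to_branch_traffic=True,
        type_properties_type='Standard',
    )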
class VirtualWanSecurityProvider(msrest.serialization.Model):
"""Collection of SecurityProviders.
Variables are only populated by the server, and will be ignored when sending a request.
:param name: Name of the security provider.
:type name: str
:param url: Url of the security provider.
:type url: str
    :ivar type: The type of the security provider. Possible values include: "External", "Native".
:vartype type: str or ~azure.mgmt.network.v2019_09_01.models.VirtualWanSecurityProviderType
"""
_validation = {
'type': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualWanSecurityProvider, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.url = kwargs.get('url', None)
self.type = None
class VirtualWanSecurityProviders(msrest.serialization.Model):
"""Collection of SecurityProviders.
:param supported_providers: List of VirtualWAN security providers.
:type supported_providers:
list[~azure.mgmt.network.v2019_09_01.models.VirtualWanSecurityProvider]
"""
_attribute_map = {
'supported_providers': {'key': 'supportedProviders', 'type': '[VirtualWanSecurityProvider]'},
}
def __init__(
self,
**kwargs
):
super(VirtualWanSecurityProviders, self).__init__(**kwargs)
self.supported_providers = kwargs.get('supported_providers', None)
class VirtualWanVpnProfileParameters(msrest.serialization.Model):
"""Virtual Wan Vpn profile parameters Vpn profile generation.
:param vpn_server_configuration_resource_id: VpnServerConfiguration partial resource uri with
which VirtualWan is associated to.
:type vpn_server_configuration_resource_id: str
:param authentication_method: VPN client authentication method. Possible values include:
"EAPTLS", "EAPMSCHAPv2".
:type authentication_method: str or ~azure.mgmt.network.v2019_09_01.models.AuthenticationMethod
"""
_attribute_map = {
'vpn_server_configuration_resource_id': {'key': 'vpnServerConfigurationResourceId', 'type': 'str'},
'authentication_method': {'key': 'authenticationMethod', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VirtualWanVpnProfileParameters, self).__init__(**kwargs)
self.vpn_server_configuration_resource_id = kwargs.get('vpn_server_configuration_resource_id', None)
self.authentication_method = kwargs.get('authentication_method', None)
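# Illustrative sketch (not part of the generated client): parameters for
# generating a P2S VPN profile for a VirtualWAN. The VpnServerConfiguration
# resource ID is a placeholder.
def _example_vwan_vpn_profile_parameters():
    """Return sample VirtualWanVpnProfileParameters."""
    return VirtualWanVpnProfileParameters(
        vpn_server_configuration_resource_id='/subscriptions/.../vpnServerConfigurations/vpn-cfg',
        authentication_method='EAPTLS',
    )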
class VM(Resource):
"""Describes a Virtual Machine.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
**kwargs
):
super(VM, self).__init__(**kwargs)
class VpnClientConfiguration(msrest.serialization.Model):
"""VpnClientConfiguration for P2S client.
:param vpn_client_address_pool: The reference of the address space resource which represents
Address space for P2S VpnClient.
:type vpn_client_address_pool: ~azure.mgmt.network.v2019_09_01.models.AddressSpace
:param vpn_client_root_certificates: VpnClientRootCertificate for virtual network gateway.
:type vpn_client_root_certificates:
list[~azure.mgmt.network.v2019_09_01.models.VpnClientRootCertificate]
:param vpn_client_revoked_certificates: VpnClientRevokedCertificate for Virtual network
gateway.
:type vpn_client_revoked_certificates:
list[~azure.mgmt.network.v2019_09_01.models.VpnClientRevokedCertificate]
:param vpn_client_protocols: VpnClientProtocols for Virtual network gateway.
:type vpn_client_protocols: list[str or
~azure.mgmt.network.v2019_09_01.models.VpnClientProtocol]
:param vpn_client_ipsec_policies: VpnClientIpsecPolicies for virtual network gateway P2S
client.
:type vpn_client_ipsec_policies: list[~azure.mgmt.network.v2019_09_01.models.IpsecPolicy]
:param radius_server_address: The radius server address property of the VirtualNetworkGateway
resource for vpn client connection.
:type radius_server_address: str
:param radius_server_secret: The radius secret property of the VirtualNetworkGateway resource
for vpn client connection.
:type radius_server_secret: str
:param aad_tenant: The AADTenant property of the VirtualNetworkGateway resource for vpn client
connection used for AAD authentication.
:type aad_tenant: str
:param aad_audience: The AADAudience property of the VirtualNetworkGateway resource for vpn
client connection used for AAD authentication.
:type aad_audience: str
:param aad_issuer: The AADIssuer property of the VirtualNetworkGateway resource for vpn client
connection used for AAD authentication.
:type aad_issuer: str
"""
_attribute_map = {
'vpn_client_address_pool': {'key': 'vpnClientAddressPool', 'type': 'AddressSpace'},
'vpn_client_root_certificates': {'key': 'vpnClientRootCertificates', 'type': '[VpnClientRootCertificate]'},
'vpn_client_revoked_certificates': {'key': 'vpnClientRevokedCertificates', 'type': '[VpnClientRevokedCertificate]'},
'vpn_client_protocols': {'key': 'vpnClientProtocols', 'type': '[str]'},
'vpn_client_ipsec_policies': {'key': 'vpnClientIpsecPolicies', 'type': '[IpsecPolicy]'},
'radius_server_address': {'key': 'radiusServerAddress', 'type': 'str'},
'radius_server_secret': {'key': 'radiusServerSecret', 'type': 'str'},
'aad_tenant': {'key': 'aadTenant', 'type': 'str'},
'aad_audience': {'key': 'aadAudience', 'type': 'str'},
'aad_issuer': {'key': 'aadIssuer', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConfiguration, self).__init__(**kwargs)
self.vpn_client_address_pool = kwargs.get('vpn_client_address_pool', None)
self.vpn_client_root_certificates = kwargs.get('vpn_client_root_certificates', None)
self.vpn_client_revoked_certificates = kwargs.get('vpn_client_revoked_certificates', None)
self.vpn_client_protocols = kwargs.get('vpn_client_protocols', None)
self.vpn_client_ipsec_policies = kwargs.get('vpn_client_ipsec_policies', None)
self.radius_server_address = kwargs.get('radius_server_address', None)
self.radius_server_secret = kwargs.get('radius_server_secret', None)
self.aad_tenant = kwargs.get('aad_tenant', None)
self.aad_audience = kwargs.get('aad_audience', None)
self.aad_issuer = kwargs.get('aad_issuer', None)
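# Illustrative sketch (not part of the generated client): a P2S client
# configuration using Azure AD authentication. The protocol string 'OpenVPN'
# is an assumed VpnClientProtocol value (the docstring above does not list the
# allowed strings); the tenant, audience, and issuer URIs are placeholders.
# Certificate- and RADIUS-based fields are simply omitted.
def _example_vpn_client_configuration():
    """Return a sample VpnClientConfiguration for AAD authentication."""
    return VpnClientConfiguration(
        vpn_client_protocols=['OpenVPN'],
        aad_tenant='https://login.microsoftonline.com/<tenant-id>',
        aad_audience='<application-id>',
        aad_issuer='https://sts.windows.net/<tenant-id>/',
    )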
class VpnClientConnectionHealth(msrest.serialization.Model):
"""VpnClientConnectionHealth properties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar total_ingress_bytes_transferred: Total of the Ingress Bytes Transferred in this P2S Vpn
connection.
:vartype total_ingress_bytes_transferred: long
:ivar total_egress_bytes_transferred: Total of the Egress Bytes Transferred in this connection.
:vartype total_egress_bytes_transferred: long
    :param vpn_client_connections_count: The total number of P2S VPN clients connected to this
     P2SVpnGateway at this time.
    :type vpn_client_connections_count: int
    :param allocated_ip_addresses: List of IP addresses allocated to the connected P2S VPN clients.
:type allocated_ip_addresses: list[str]
"""
_validation = {
'total_ingress_bytes_transferred': {'readonly': True},
'total_egress_bytes_transferred': {'readonly': True},
}
_attribute_map = {
'total_ingress_bytes_transferred': {'key': 'totalIngressBytesTransferred', 'type': 'long'},
'total_egress_bytes_transferred': {'key': 'totalEgressBytesTransferred', 'type': 'long'},
'vpn_client_connections_count': {'key': 'vpnClientConnectionsCount', 'type': 'int'},
'allocated_ip_addresses': {'key': 'allocatedIpAddresses', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConnectionHealth, self).__init__(**kwargs)
self.total_ingress_bytes_transferred = None
self.total_egress_bytes_transferred = None
self.vpn_client_connections_count = kwargs.get('vpn_client_connections_count', None)
self.allocated_ip_addresses = kwargs.get('allocated_ip_addresses', None)
class VpnClientConnectionHealthDetail(msrest.serialization.Model):
"""VPN client connection health detail.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar vpn_connection_id: The vpn client Id.
:vartype vpn_connection_id: str
    :ivar vpn_connection_duration: The connection duration of a connected vpn client.
:vartype vpn_connection_duration: long
:ivar vpn_connection_time: The start time of a connected vpn client.
:vartype vpn_connection_time: str
:ivar public_ip_address: The public Ip of a connected vpn client.
:vartype public_ip_address: str
:ivar private_ip_address: The assigned private Ip of a connected vpn client.
:vartype private_ip_address: str
:ivar vpn_user_name: The user name of a connected vpn client.
:vartype vpn_user_name: str
    :ivar max_bandwidth: The maximum bandwidth.
:vartype max_bandwidth: long
:ivar egress_packets_transferred: The egress packets per second.
:vartype egress_packets_transferred: long
:ivar egress_bytes_transferred: The egress bytes per second.
:vartype egress_bytes_transferred: long
:ivar ingress_packets_transferred: The ingress packets per second.
:vartype ingress_packets_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes per second.
:vartype ingress_bytes_transferred: long
:ivar max_packets_per_second: The max packets transferred per second.
:vartype max_packets_per_second: long
"""
_validation = {
'vpn_connection_id': {'readonly': True},
'vpn_connection_duration': {'readonly': True},
'vpn_connection_time': {'readonly': True},
'public_ip_address': {'readonly': True},
'private_ip_address': {'readonly': True},
'vpn_user_name': {'readonly': True},
'max_bandwidth': {'readonly': True},
'egress_packets_transferred': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_packets_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'max_packets_per_second': {'readonly': True},
}
_attribute_map = {
'vpn_connection_id': {'key': 'vpnConnectionId', 'type': 'str'},
'vpn_connection_duration': {'key': 'vpnConnectionDuration', 'type': 'long'},
'vpn_connection_time': {'key': 'vpnConnectionTime', 'type': 'str'},
'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
'vpn_user_name': {'key': 'vpnUserName', 'type': 'str'},
'max_bandwidth': {'key': 'maxBandwidth', 'type': 'long'},
'egress_packets_transferred': {'key': 'egressPacketsTransferred', 'type': 'long'},
'egress_bytes_transferred': {'key': 'egressBytesTransferred', 'type': 'long'},
'ingress_packets_transferred': {'key': 'ingressPacketsTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'ingressBytesTransferred', 'type': 'long'},
'max_packets_per_second': {'key': 'maxPacketsPerSecond', 'type': 'long'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConnectionHealthDetail, self).__init__(**kwargs)
self.vpn_connection_id = None
self.vpn_connection_duration = None
self.vpn_connection_time = None
self.public_ip_address = None
self.private_ip_address = None
self.vpn_user_name = None
self.max_bandwidth = None
self.egress_packets_transferred = None
self.egress_bytes_transferred = None
self.ingress_packets_transferred = None
self.ingress_bytes_transferred = None
self.max_packets_per_second = None
class VpnClientConnectionHealthDetailListResult(msrest.serialization.Model):
"""List of virtual network gateway vpn client connection health.
:param value: List of vpn client connection health.
:type value: list[~azure.mgmt.network.v2019_09_01.models.VpnClientConnectionHealthDetail]
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[VpnClientConnectionHealthDetail]'},
}
def __init__(
self,
**kwargs
):
super(VpnClientConnectionHealthDetailListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
class VpnClientIPsecParameters(msrest.serialization.Model):
"""An IPSec parameters for a virtual network gateway P2S connection.
All required parameters must be populated in order to send to Azure.
:param sa_life_time_seconds: Required. The IPSec Security Association (also called Quick Mode
or Phase 2 SA) lifetime in seconds for P2S client.
:type sa_life_time_seconds: int
:param sa_data_size_kilobytes: Required. The IPSec Security Association (also called Quick Mode
     or Phase 2 SA) payload size in KB for P2S client.
:type sa_data_size_kilobytes: int
:param ipsec_encryption: Required. The IPSec encryption algorithm (IKE phase 1). Possible
values include: "None", "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES128", "GCMAES192",
"GCMAES256".
:type ipsec_encryption: str or ~azure.mgmt.network.v2019_09_01.models.IpsecEncryption
:param ipsec_integrity: Required. The IPSec integrity algorithm (IKE phase 1). Possible values
include: "MD5", "SHA1", "SHA256", "GCMAES128", "GCMAES192", "GCMAES256".
:type ipsec_integrity: str or ~azure.mgmt.network.v2019_09_01.models.IpsecIntegrity
:param ike_encryption: Required. The IKE encryption algorithm (IKE phase 2). Possible values
include: "DES", "DES3", "AES128", "AES192", "AES256", "GCMAES256", "GCMAES128".
:type ike_encryption: str or ~azure.mgmt.network.v2019_09_01.models.IkeEncryption
:param ike_integrity: Required. The IKE integrity algorithm (IKE phase 2). Possible values
include: "MD5", "SHA1", "SHA256", "SHA384", "GCMAES256", "GCMAES128".
:type ike_integrity: str or ~azure.mgmt.network.v2019_09_01.models.IkeIntegrity
:param dh_group: Required. The DH Group used in IKE Phase 1 for initial SA. Possible values
include: "None", "DHGroup1", "DHGroup2", "DHGroup14", "DHGroup2048", "ECP256", "ECP384",
"DHGroup24".
:type dh_group: str or ~azure.mgmt.network.v2019_09_01.models.DhGroup
:param pfs_group: Required. The Pfs Group used in IKE Phase 2 for new child SA. Possible values
include: "None", "PFS1", "PFS2", "PFS2048", "ECP256", "ECP384", "PFS24", "PFS14", "PFSMM".
:type pfs_group: str or ~azure.mgmt.network.v2019_09_01.models.PfsGroup
"""
_validation = {
'sa_life_time_seconds': {'required': True},
'sa_data_size_kilobytes': {'required': True},
'ipsec_encryption': {'required': True},
'ipsec_integrity': {'required': True},
'ike_encryption': {'required': True},
'ike_integrity': {'required': True},
'dh_group': {'required': True},
'pfs_group': {'required': True},
}
_attribute_map = {
'sa_life_time_seconds': {'key': 'saLifeTimeSeconds', 'type': 'int'},
'sa_data_size_kilobytes': {'key': 'saDataSizeKilobytes', 'type': 'int'},
'ipsec_encryption': {'key': 'ipsecEncryption', 'type': 'str'},
'ipsec_integrity': {'key': 'ipsecIntegrity', 'type': 'str'},
'ike_encryption': {'key': 'ikeEncryption', 'type': 'str'},
'ike_integrity': {'key': 'ikeIntegrity', 'type': 'str'},
'dh_group': {'key': 'dhGroup', 'type': 'str'},
'pfs_group': {'key': 'pfsGroup', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientIPsecParameters, self).__init__(**kwargs)
self.sa_life_time_seconds = kwargs['sa_life_time_seconds']
self.sa_data_size_kilobytes = kwargs['sa_data_size_kilobytes']
self.ipsec_encryption = kwargs['ipsec_encryption']
self.ipsec_integrity = kwargs['ipsec_integrity']
self.ike_encryption = kwargs['ike_encryption']
self.ike_integrity = kwargs['ike_integrity']
self.dh_group = kwargs['dh_group']
self.pfs_group = kwargs['pfs_group']
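# Illustrative sketch (not part of the generated client): every field of
# VpnClientIPsecParameters is required, so omitting any of them raises a
# KeyError from the constructor above. The algorithm strings come from the
# "Possible values" lists in the docstring; the numeric values are placeholders.
def _example_vpn_client_ipsec_parameters():
    """Return a fully-populated VpnClientIPsecParameters."""
    return VpnClientIPsecParameters(
        sa_life_time_seconds=86472,
        sa_data_size_kilobytes=429497,
        ipsec_encryption='GCMAES256',
        ipsec_integrity='GCMAES256',
        ike_encryption='AES256',
        ike_integrity='SHA384',
        dh_group='DHGroup24',
        pfs_group='PFS24',
    )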
class VpnClientParameters(msrest.serialization.Model):
"""Vpn Client Parameters for package generation.
:param processor_architecture: VPN client Processor Architecture. Possible values include:
"Amd64", "X86".
:type processor_architecture: str or
~azure.mgmt.network.v2019_09_01.models.ProcessorArchitecture
:param authentication_method: VPN client authentication method. Possible values include:
"EAPTLS", "EAPMSCHAPv2".
:type authentication_method: str or ~azure.mgmt.network.v2019_09_01.models.AuthenticationMethod
:param radius_server_auth_certificate: The public certificate data for the radius server
authentication certificate as a Base-64 encoded string. Required only if external radius
authentication has been configured with EAPTLS authentication.
:type radius_server_auth_certificate: str
:param client_root_certificates: A list of client root certificates public certificate data
encoded as Base-64 strings. Optional parameter for external radius based authentication with
EAPTLS.
:type client_root_certificates: list[str]
"""
_attribute_map = {
'processor_architecture': {'key': 'processorArchitecture', 'type': 'str'},
'authentication_method': {'key': 'authenticationMethod', 'type': 'str'},
'radius_server_auth_certificate': {'key': 'radiusServerAuthCertificate', 'type': 'str'},
'client_root_certificates': {'key': 'clientRootCertificates', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(VpnClientParameters, self).__init__(**kwargs)
self.processor_architecture = kwargs.get('processor_architecture', None)
self.authentication_method = kwargs.get('authentication_method', None)
self.radius_server_auth_certificate = kwargs.get('radius_server_auth_certificate', None)
self.client_root_certificates = kwargs.get('client_root_certificates', None)
class VpnClientRevokedCertificate(SubResource):
"""VPN client revoked certificate of virtual network gateway.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param thumbprint: The revoked VPN client certificate thumbprint.
:type thumbprint: str
:ivar provisioning_state: The provisioning state of the VPN client revoked certificate
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientRevokedCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.thumbprint = kwargs.get('thumbprint', None)
self.provisioning_state = None
class VpnClientRootCertificate(SubResource):
"""VPN client root certificate of virtual network gateway.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param public_cert_data: Required. The certificate public data.
:type public_cert_data: str
:ivar provisioning_state: The provisioning state of the VPN client root certificate resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'public_cert_data': {'required': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'public_cert_data': {'key': 'properties.publicCertData', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnClientRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.public_cert_data = kwargs['public_cert_data']
self.provisioning_state = None
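# Illustrative sketch (not part of the generated client): ``public_cert_data``
# is required and holds the Base-64 encoded public certificate. The string
# below is a truncated placeholder, not a real certificate.
def _example_vpn_client_root_certificate():
    """Return a sample VpnClientRootCertificate."""
    return VpnClientRootCertificate(
        name='p2s-root-cert',
        public_cert_data='MIIC5zCCAc+gAwIBAgI...placeholder...',
    )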
class VpnConnection(SubResource):
"""VpnConnection Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param remote_vpn_site: Id of the connected vpn site.
:type remote_vpn_site: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param routing_weight: Routing weight for vpn connection.
:type routing_weight: int
:ivar connection_status: The connection status. Possible values include: "Unknown",
"Connecting", "Connected", "NotConnected".
:vartype connection_status: str or ~azure.mgmt.network.v2019_09_01.models.VpnConnectionStatus
:param vpn_connection_protocol_type: Connection protocol used for this connection. Possible
values include: "IKEv2", "IKEv1".
:type vpn_connection_protocol_type: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionProtocol
:ivar ingress_bytes_transferred: Ingress bytes transferred.
:vartype ingress_bytes_transferred: long
:ivar egress_bytes_transferred: Egress bytes transferred.
:vartype egress_bytes_transferred: long
:param connection_bandwidth: Expected bandwidth in MBPS.
:type connection_bandwidth: int
:param shared_key: SharedKey for the vpn connection.
:type shared_key: str
:param enable_bgp: EnableBgp flag.
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this connection.
:type ipsec_policies: list[~azure.mgmt.network.v2019_09_01.models.IpsecPolicy]
    :param enable_rate_limiting: EnableRateLimiting flag.
:type enable_rate_limiting: bool
:param enable_internet_security: Enable internet security.
:type enable_internet_security: bool
    :param use_local_azure_ip_address: Use the local Azure IP address to initiate the connection.
:type use_local_azure_ip_address: bool
:ivar provisioning_state: The provisioning state of the VPN connection resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param vpn_link_connections: List of all vpn site link connections to the gateway.
:type vpn_link_connections: list[~azure.mgmt.network.v2019_09_01.models.VpnSiteLinkConnection]
"""
_validation = {
'etag': {'readonly': True},
'connection_status': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'remote_vpn_site': {'key': 'properties.remoteVpnSite', 'type': 'SubResource'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'vpn_connection_protocol_type': {'key': 'properties.vpnConnectionProtocolType', 'type': 'str'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'connection_bandwidth': {'key': 'properties.connectionBandwidth', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'enable_rate_limiting': {'key': 'properties.enableRateLimiting', 'type': 'bool'},
'enable_internet_security': {'key': 'properties.enableInternetSecurity', 'type': 'bool'},
'use_local_azure_ip_address': {'key': 'properties.useLocalAzureIpAddress', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'vpn_link_connections': {'key': 'properties.vpnLinkConnections', 'type': '[VpnSiteLinkConnection]'},
}
def __init__(
self,
**kwargs
):
super(VpnConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.remote_vpn_site = kwargs.get('remote_vpn_site', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.connection_status = None
self.vpn_connection_protocol_type = kwargs.get('vpn_connection_protocol_type', None)
self.ingress_bytes_transferred = None
self.egress_bytes_transferred = None
self.connection_bandwidth = kwargs.get('connection_bandwidth', None)
self.shared_key = kwargs.get('shared_key', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.enable_rate_limiting = kwargs.get('enable_rate_limiting', None)
self.enable_internet_security = kwargs.get('enable_internet_security', None)
self.use_local_azure_ip_address = kwargs.get('use_local_azure_ip_address', None)
self.provisioning_state = None
self.vpn_link_connections = kwargs.get('vpn_link_connections', None)
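# Illustrative sketch (not part of the generated client): a VpnConnection that
# references a VpnSite by ID. Connection status and byte counters are
# read-only and are filled in by the server; the IDs and shared key below are
# placeholders.
def _example_vpn_connection():
    """Return a sample VpnConnection to a remote VPN site."""
    return VpnConnection(
        name='to-branch-1',
        remote_vpn_site=SubResource(id='/subscriptions/.../vpnSites/branch-1'),
        vpn_connection_protocol_type='IKEv2',
        connection_bandwidth=50,
        shared_key='<pre-shared-key>',
        enable_bgp=False,
    )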
class VpnDeviceScriptParameters(msrest.serialization.Model):
"""Vpn device configuration script generation parameters.
:param vendor: The vendor for the vpn device.
:type vendor: str
:param device_family: The device family for the vpn device.
:type device_family: str
:param firmware_version: The firmware version for the vpn device.
:type firmware_version: str
"""
_attribute_map = {
'vendor': {'key': 'vendor', 'type': 'str'},
'device_family': {'key': 'deviceFamily', 'type': 'str'},
'firmware_version': {'key': 'firmwareVersion', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnDeviceScriptParameters, self).__init__(**kwargs)
self.vendor = kwargs.get('vendor', None)
self.device_family = kwargs.get('device_family', None)
self.firmware_version = kwargs.get('firmware_version', None)
class VpnGateway(Resource):
"""VpnGateway Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_hub: The VirtualHub to which the gateway belongs.
:type virtual_hub: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param connections: List of all vpn connections to the gateway.
:type connections: list[~azure.mgmt.network.v2019_09_01.models.VpnConnection]
:param bgp_settings: Local network gateway's BGP speaker settings.
:type bgp_settings: ~azure.mgmt.network.v2019_09_01.models.BgpSettings
:ivar provisioning_state: The provisioning state of the VPN gateway resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param vpn_gateway_scale_unit: The scale unit for this vpn gateway.
:type vpn_gateway_scale_unit: int
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_hub': {'key': 'properties.virtualHub', 'type': 'SubResource'},
'connections': {'key': 'properties.connections', 'type': '[VpnConnection]'},
'bgp_settings': {'key': 'properties.bgpSettings', 'type': 'BgpSettings'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'vpn_gateway_scale_unit': {'key': 'properties.vpnGatewayScaleUnit', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(VpnGateway, self).__init__(**kwargs)
self.etag = None
self.virtual_hub = kwargs.get('virtual_hub', None)
self.connections = kwargs.get('connections', None)
self.bgp_settings = kwargs.get('bgp_settings', None)
self.provisioning_state = None
self.vpn_gateway_scale_unit = kwargs.get('vpn_gateway_scale_unit', None)
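# Illustrative sketch (not part of the generated SDK code): these models are
# populated purely through keyword arguments, and server-populated fields such
# as etag/provisioning_state stay None and are ignored on requests. The
# resource ID below is a placeholder; SubResource is the reference model
# defined earlier in this module.
def _example_vpn_gateway():
    hub_ref = SubResource(id='/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Network/virtualHubs/hub1')
    branch_connection = VpnConnection(name='to-branch-1', routing_weight=10, enable_bgp=False)
    return VpnGateway(location='westus', virtual_hub=hub_ref, connections=[branch_connection], vpn_gateway_scale_unit=1)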
class VpnLinkBgpSettings(msrest.serialization.Model):
"""BGP settings details for a link.
:param asn: The BGP speaker's ASN.
:type asn: long
:param bgp_peering_address: The BGP peering address and BGP identifier of this BGP speaker.
:type bgp_peering_address: str
"""
_attribute_map = {
'asn': {'key': 'asn', 'type': 'long'},
'bgp_peering_address': {'key': 'bgpPeeringAddress', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnLinkBgpSettings, self).__init__(**kwargs)
self.asn = kwargs.get('asn', None)
self.bgp_peering_address = kwargs.get('bgp_peering_address', None)
class VpnLinkProviderProperties(msrest.serialization.Model):
"""List of properties of a link provider.
:param link_provider_name: Name of the link provider.
:type link_provider_name: str
:param link_speed_in_mbps: Link speed.
:type link_speed_in_mbps: int
"""
_attribute_map = {
'link_provider_name': {'key': 'linkProviderName', 'type': 'str'},
'link_speed_in_mbps': {'key': 'linkSpeedInMbps', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(VpnLinkProviderProperties, self).__init__(**kwargs)
self.link_provider_name = kwargs.get('link_provider_name', None)
self.link_speed_in_mbps = kwargs.get('link_speed_in_mbps', None)
class VpnPacketCaptureStartParameters(msrest.serialization.Model):
"""Start packet capture parameters on virtual network gateway.
:param filter_data: Start Packet capture parameters.
:type filter_data: str
"""
_attribute_map = {
'filter_data': {'key': 'filterData', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnPacketCaptureStartParameters, self).__init__(**kwargs)
self.filter_data = kwargs.get('filter_data', None)
class VpnPacketCaptureStopParameters(msrest.serialization.Model):
"""Stop packet capture parameters.
:param sas_url: SAS url for packet capture on virtual network gateway.
:type sas_url: str
"""
_attribute_map = {
'sas_url': {'key': 'sasUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnPacketCaptureStopParameters, self).__init__(**kwargs)
self.sas_url = kwargs.get('sas_url', None)
class VpnProfileResponse(msrest.serialization.Model):
"""Vpn Profile Response for package generation.
:param profile_url: URL to the VPN profile.
:type profile_url: str
"""
_attribute_map = {
'profile_url': {'key': 'profileUrl', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnProfileResponse, self).__init__(**kwargs)
self.profile_url = kwargs.get('profile_url', None)
class VpnServerConfigRadiusClientRootCertificate(msrest.serialization.Model):
"""Properties of the Radius client root certificate of VpnServerConfiguration.
:param name: The certificate name.
:type name: str
:param thumbprint: The Radius client root certificate thumbprint.
:type thumbprint: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnServerConfigRadiusClientRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.thumbprint = kwargs.get('thumbprint', None)
class VpnServerConfigRadiusServerRootCertificate(msrest.serialization.Model):
"""Properties of Radius Server root certificate of VpnServerConfiguration.
:param name: The certificate name.
:type name: str
:param public_cert_data: The certificate public data.
:type public_cert_data: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'public_cert_data': {'key': 'publicCertData', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnServerConfigRadiusServerRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.public_cert_data = kwargs.get('public_cert_data', None)
class VpnServerConfiguration(Resource):
"""VpnServerConfiguration Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param name_properties_name: The name of the VpnServerConfiguration that is unique within a
resource group.
:type name_properties_name: str
:param vpn_protocols: VPN protocols for the VpnServerConfiguration.
:type vpn_protocols: list[str or
~azure.mgmt.network.v2019_09_01.models.VpnGatewayTunnelingProtocol]
:param vpn_authentication_types: VPN authentication types for the VpnServerConfiguration.
:type vpn_authentication_types: list[str or
~azure.mgmt.network.v2019_09_01.models.VpnAuthenticationType]
:param vpn_client_root_certificates: VPN client root certificate of VpnServerConfiguration.
:type vpn_client_root_certificates:
list[~azure.mgmt.network.v2019_09_01.models.VpnServerConfigVpnClientRootCertificate]
:param vpn_client_revoked_certificates: VPN client revoked certificate of
VpnServerConfiguration.
:type vpn_client_revoked_certificates:
list[~azure.mgmt.network.v2019_09_01.models.VpnServerConfigVpnClientRevokedCertificate]
:param radius_server_root_certificates: Radius Server root certificate of
VpnServerConfiguration.
:type radius_server_root_certificates:
list[~azure.mgmt.network.v2019_09_01.models.VpnServerConfigRadiusServerRootCertificate]
:param radius_client_root_certificates: Radius client root certificate of
VpnServerConfiguration.
:type radius_client_root_certificates:
list[~azure.mgmt.network.v2019_09_01.models.VpnServerConfigRadiusClientRootCertificate]
:param vpn_client_ipsec_policies: VpnClientIpsecPolicies for VpnServerConfiguration.
:type vpn_client_ipsec_policies: list[~azure.mgmt.network.v2019_09_01.models.IpsecPolicy]
:param radius_server_address: The radius server address property of the VpnServerConfiguration
resource for point to site client connection.
:type radius_server_address: str
:param radius_server_secret: The radius secret property of the VpnServerConfiguration resource
for point to site client connection.
:type radius_server_secret: str
:param aad_authentication_parameters: The set of aad vpn authentication parameters.
:type aad_authentication_parameters:
~azure.mgmt.network.v2019_09_01.models.AadAuthenticationParameters
:ivar provisioning_state: The provisioning state of the VpnServerConfiguration resource.
Possible values are: 'Updating', 'Deleting', and 'Failed'.
:vartype provisioning_state: str
:ivar p2_s_vpn_gateways: List of references to P2SVpnGateways.
:vartype p2_s_vpn_gateways: list[~azure.mgmt.network.v2019_09_01.models.P2SVpnGateway]
:ivar etag_properties_etag: A unique read-only string that changes whenever the resource is
updated.
:vartype etag_properties_etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
'p2_s_vpn_gateways': {'readonly': True},
'etag_properties_etag': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'vpn_protocols': {'key': 'properties.vpnProtocols', 'type': '[str]'},
'vpn_authentication_types': {'key': 'properties.vpnAuthenticationTypes', 'type': '[str]'},
'vpn_client_root_certificates': {'key': 'properties.vpnClientRootCertificates', 'type': '[VpnServerConfigVpnClientRootCertificate]'},
'vpn_client_revoked_certificates': {'key': 'properties.vpnClientRevokedCertificates', 'type': '[VpnServerConfigVpnClientRevokedCertificate]'},
'radius_server_root_certificates': {'key': 'properties.radiusServerRootCertificates', 'type': '[VpnServerConfigRadiusServerRootCertificate]'},
'radius_client_root_certificates': {'key': 'properties.radiusClientRootCertificates', 'type': '[VpnServerConfigRadiusClientRootCertificate]'},
'vpn_client_ipsec_policies': {'key': 'properties.vpnClientIpsecPolicies', 'type': '[IpsecPolicy]'},
'radius_server_address': {'key': 'properties.radiusServerAddress', 'type': 'str'},
'radius_server_secret': {'key': 'properties.radiusServerSecret', 'type': 'str'},
'aad_authentication_parameters': {'key': 'properties.aadAuthenticationParameters', 'type': 'AadAuthenticationParameters'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'p2_s_vpn_gateways': {'key': 'properties.p2SVpnGateways', 'type': '[P2SVpnGateway]'},
'etag_properties_etag': {'key': 'properties.etag', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnServerConfiguration, self).__init__(**kwargs)
self.etag = None
self.name_properties_name = kwargs.get('name_properties_name', None)
self.vpn_protocols = kwargs.get('vpn_protocols', None)
self.vpn_authentication_types = kwargs.get('vpn_authentication_types', None)
self.vpn_client_root_certificates = kwargs.get('vpn_client_root_certificates', None)
self.vpn_client_revoked_certificates = kwargs.get('vpn_client_revoked_certificates', None)
self.radius_server_root_certificates = kwargs.get('radius_server_root_certificates', None)
self.radius_client_root_certificates = kwargs.get('radius_client_root_certificates', None)
self.vpn_client_ipsec_policies = kwargs.get('vpn_client_ipsec_policies', None)
self.radius_server_address = kwargs.get('radius_server_address', None)
self.radius_server_secret = kwargs.get('radius_server_secret', None)
self.aad_authentication_parameters = kwargs.get('aad_authentication_parameters', None)
self.provisioning_state = None
self.p2_s_vpn_gateways = None
self.etag_properties_etag = None
class VpnServerConfigurationsResponse(msrest.serialization.Model):
"""VpnServerConfigurations list associated with VirtualWan Response.
:param vpn_server_configuration_resource_ids: List of VpnServerConfigurations associated with
VirtualWan.
:type vpn_server_configuration_resource_ids: list[str]
"""
_attribute_map = {
'vpn_server_configuration_resource_ids': {'key': 'vpnServerConfigurationResourceIds', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(VpnServerConfigurationsResponse, self).__init__(**kwargs)
self.vpn_server_configuration_resource_ids = kwargs.get('vpn_server_configuration_resource_ids', None)
class VpnServerConfigVpnClientRevokedCertificate(msrest.serialization.Model):
"""Properties of the revoked VPN client certificate of VpnServerConfiguration.
:param name: The certificate name.
:type name: str
:param thumbprint: The revoked VPN client certificate thumbprint.
:type thumbprint: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnServerConfigVpnClientRevokedCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.thumbprint = kwargs.get('thumbprint', None)
class VpnServerConfigVpnClientRootCertificate(msrest.serialization.Model):
"""Properties of VPN client root certificate of VpnServerConfiguration.
:param name: The certificate name.
:type name: str
:param public_cert_data: The certificate public data.
:type public_cert_data: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'public_cert_data': {'key': 'publicCertData', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnServerConfigVpnClientRootCertificate, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.public_cert_data = kwargs.get('public_cert_data', None)
class VpnSite(Resource):
"""VpnSite Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param virtual_wan: The VirtualWAN to which the vpnSite belongs.
:type virtual_wan: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param device_properties: The device properties.
:type device_properties: ~azure.mgmt.network.v2019_09_01.models.DeviceProperties
:param ip_address: The ip-address for the vpn-site.
:type ip_address: str
:param site_key: The key for vpn-site that can be used for connections.
:type site_key: str
:param address_space: The AddressSpace that contains an array of IP address ranges.
:type address_space: ~azure.mgmt.network.v2019_09_01.models.AddressSpace
:param bgp_properties: The set of bgp properties.
:type bgp_properties: ~azure.mgmt.network.v2019_09_01.models.BgpSettings
:ivar provisioning_state: The provisioning state of the VPN site resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:param is_security_site: IsSecuritySite flag.
:type is_security_site: bool
:param vpn_site_links: List of all vpn site links.
:type vpn_site_links: list[~azure.mgmt.network.v2019_09_01.models.VpnSiteLink]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'virtual_wan': {'key': 'properties.virtualWan', 'type': 'SubResource'},
'device_properties': {'key': 'properties.deviceProperties', 'type': 'DeviceProperties'},
'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
'site_key': {'key': 'properties.siteKey', 'type': 'str'},
'address_space': {'key': 'properties.addressSpace', 'type': 'AddressSpace'},
'bgp_properties': {'key': 'properties.bgpProperties', 'type': 'BgpSettings'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'is_security_site': {'key': 'properties.isSecuritySite', 'type': 'bool'},
'vpn_site_links': {'key': 'properties.vpnSiteLinks', 'type': '[VpnSiteLink]'},
}
def __init__(
self,
**kwargs
):
super(VpnSite, self).__init__(**kwargs)
self.etag = None
self.virtual_wan = kwargs.get('virtual_wan', None)
self.device_properties = kwargs.get('device_properties', None)
self.ip_address = kwargs.get('ip_address', None)
self.site_key = kwargs.get('site_key', None)
self.address_space = kwargs.get('address_space', None)
self.bgp_properties = kwargs.get('bgp_properties', None)
self.provisioning_state = None
self.is_security_site = kwargs.get('is_security_site', None)
self.vpn_site_links = kwargs.get('vpn_site_links', None)
class VpnSiteId(msrest.serialization.Model):
"""VpnSite Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar vpn_site: The resource-uri of the vpn-site for which config is to be fetched.
:vartype vpn_site: str
"""
_validation = {
'vpn_site': {'readonly': True},
}
_attribute_map = {
'vpn_site': {'key': 'vpnSite', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnSiteId, self).__init__(**kwargs)
self.vpn_site = None
class VpnSiteLink(SubResource):
"""VpnSiteLink Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar type: Resource type.
:vartype type: str
:param link_properties: The link provider properties.
:type link_properties: ~azure.mgmt.network.v2019_09_01.models.VpnLinkProviderProperties
:param ip_address: The ip-address for the vpn-site-link.
:type ip_address: str
:param bgp_properties: The set of bgp properties.
:type bgp_properties: ~azure.mgmt.network.v2019_09_01.models.VpnLinkBgpSettings
:ivar provisioning_state: The provisioning state of the VPN site link resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'link_properties': {'key': 'properties.linkProperties', 'type': 'VpnLinkProviderProperties'},
'ip_address': {'key': 'properties.ipAddress', 'type': 'str'},
'bgp_properties': {'key': 'properties.bgpProperties', 'type': 'VpnLinkBgpSettings'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnSiteLink, self).__init__(**kwargs)
self.etag = None
self.name = kwargs.get('name', None)
self.type = None
self.link_properties = kwargs.get('link_properties', None)
self.ip_address = kwargs.get('ip_address', None)
self.bgp_properties = kwargs.get('bgp_properties', None)
self.provisioning_state = None
class VpnSiteLinkConnection(SubResource):
"""VpnSiteLinkConnection Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Resource type.
:vartype type: str
:param vpn_site_link: Id of the connected vpn site link.
:type vpn_site_link: ~azure.mgmt.network.v2019_09_01.models.SubResource
:param routing_weight: Routing weight for vpn connection.
:type routing_weight: int
:ivar connection_status: The connection status. Possible values include: "Unknown",
"Connecting", "Connected", "NotConnected".
:vartype connection_status: str or ~azure.mgmt.network.v2019_09_01.models.VpnConnectionStatus
:param vpn_connection_protocol_type: Connection protocol used for this connection. Possible
values include: "IKEv2", "IKEv1".
:type vpn_connection_protocol_type: str or
~azure.mgmt.network.v2019_09_01.models.VirtualNetworkGatewayConnectionProtocol
:ivar ingress_bytes_transferred: Ingress bytes transferred.
:vartype ingress_bytes_transferred: long
:ivar egress_bytes_transferred: Egress bytes transferred.
:vartype egress_bytes_transferred: long
:param connection_bandwidth: Expected bandwidth in MBPS.
:type connection_bandwidth: int
:param shared_key: SharedKey for the vpn connection.
:type shared_key: str
:param enable_bgp: EnableBgp flag.
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this connection.
:type ipsec_policies: list[~azure.mgmt.network.v2019_09_01.models.IpsecPolicy]
    :param enable_rate_limiting: EnableRateLimiting flag.
:type enable_rate_limiting: bool
:param use_local_azure_ip_address: Use local azure ip to initiate connection.
:type use_local_azure_ip_address: bool
:ivar provisioning_state: The provisioning state of the VPN site link connection resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
"""
_validation = {
'etag': {'readonly': True},
'type': {'readonly': True},
'connection_status': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vpn_site_link': {'key': 'properties.vpnSiteLink', 'type': 'SubResource'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'vpn_connection_protocol_type': {'key': 'properties.vpnConnectionProtocolType', 'type': 'str'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'connection_bandwidth': {'key': 'properties.connectionBandwidth', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'enable_rate_limiting': {'key': 'properties.enableRateLimiting', 'type': 'bool'},
'use_local_azure_ip_address': {'key': 'properties.useLocalAzureIpAddress', 'type': 'bool'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(VpnSiteLinkConnection, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.type = None
self.vpn_site_link = kwargs.get('vpn_site_link', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.connection_status = None
self.vpn_connection_protocol_type = kwargs.get('vpn_connection_protocol_type', None)
self.ingress_bytes_transferred = None
self.egress_bytes_transferred = None
self.connection_bandwidth = kwargs.get('connection_bandwidth', None)
self.shared_key = kwargs.get('shared_key', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.enable_rate_limiting = kwargs.get('enable_rate_limiting', None)
self.use_local_azure_ip_address = kwargs.get('use_local_azure_ip_address', None)
self.provisioning_state = None
class WebApplicationFirewallCustomRule(msrest.serialization.Model):
"""Defines contents of a web application rule.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param name: The name of the resource that is unique within a policy. This name can be used to
access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param priority: Required. Describes priority of the rule. Rules with a lower value will be
evaluated before rules with a higher value.
:type priority: int
:param rule_type: Required. Describes type of rule. Possible values include: "MatchRule",
"Invalid".
:type rule_type: str or ~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallRuleType
:param match_conditions: Required. List of match conditions.
:type match_conditions: list[~azure.mgmt.network.v2019_09_01.models.MatchCondition]
:param action: Required. Type of Actions. Possible values include: "Allow", "Block", "Log".
:type action: str or ~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallAction
"""
_validation = {
'name': {'max_length': 128, 'min_length': 0},
'etag': {'readonly': True},
'priority': {'required': True},
'rule_type': {'required': True},
'match_conditions': {'required': True},
'action': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'rule_type': {'key': 'ruleType', 'type': 'str'},
'match_conditions': {'key': 'matchConditions', 'type': '[MatchCondition]'},
'action': {'key': 'action', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebApplicationFirewallCustomRule, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.etag = None
self.priority = kwargs['priority']
self.rule_type = kwargs['rule_type']
self.match_conditions = kwargs['match_conditions']
self.action = kwargs['action']
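# Illustrative sketch (not part of the generated SDK code): the four required
# kwargs are read with kwargs[...] above and therefore raise KeyError when
# omitted, unlike the optional 'name'. `match_conditions` is assumed to be a
# list of MatchCondition models built elsewhere.
def _example_waf_custom_rule(match_conditions):
    return WebApplicationFirewallCustomRule(
        name='block-bad-bots',
        priority=100,
        rule_type='MatchRule',
        match_conditions=match_conditions,
        action='Block',
    )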
class WebApplicationFirewallPolicy(Resource):
"""Defines web application firewall policy.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param policy_settings: Describes policySettings for policy.
:type policy_settings: ~azure.mgmt.network.v2019_09_01.models.PolicySettings
:param custom_rules: Describes custom rules inside the policy.
:type custom_rules:
list[~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallCustomRule]
:ivar application_gateways: A collection of references to application gateways.
:vartype application_gateways: list[~azure.mgmt.network.v2019_09_01.models.ApplicationGateway]
:ivar provisioning_state: The provisioning state of the web application firewall policy
resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
:ivar resource_state: Resource status of the policy. Possible values include: "Creating",
"Enabling", "Enabled", "Disabling", "Disabled", "Deleting".
:vartype resource_state: str or
~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallPolicyResourceState
:param managed_rules: Describes the managedRules structure.
:type managed_rules: ~azure.mgmt.network.v2019_09_01.models.ManagedRulesDefinition
:ivar http_listeners: A collection of references to application gateway http listeners.
:vartype http_listeners: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
:ivar path_based_rules: A collection of references to application gateway path rules.
:vartype path_based_rules: list[~azure.mgmt.network.v2019_09_01.models.SubResource]
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'etag': {'readonly': True},
'application_gateways': {'readonly': True},
'provisioning_state': {'readonly': True},
'resource_state': {'readonly': True},
'http_listeners': {'readonly': True},
'path_based_rules': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'policy_settings': {'key': 'properties.policySettings', 'type': 'PolicySettings'},
'custom_rules': {'key': 'properties.customRules', 'type': '[WebApplicationFirewallCustomRule]'},
'application_gateways': {'key': 'properties.applicationGateways', 'type': '[ApplicationGateway]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
'managed_rules': {'key': 'properties.managedRules', 'type': 'ManagedRulesDefinition'},
'http_listeners': {'key': 'properties.httpListeners', 'type': '[SubResource]'},
'path_based_rules': {'key': 'properties.pathBasedRules', 'type': '[SubResource]'},
}
def __init__(
self,
**kwargs
):
super(WebApplicationFirewallPolicy, self).__init__(**kwargs)
self.etag = None
self.policy_settings = kwargs.get('policy_settings', None)
self.custom_rules = kwargs.get('custom_rules', None)
self.application_gateways = None
self.provisioning_state = None
self.resource_state = None
self.managed_rules = kwargs.get('managed_rules', None)
self.http_listeners = None
self.path_based_rules = None
class WebApplicationFirewallPolicyListResult(msrest.serialization.Model):
"""Result of the request to list WebApplicationFirewallPolicies. It contains a list of WebApplicationFirewallPolicy objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of WebApplicationFirewallPolicies within a resource group.
:vartype value: list[~azure.mgmt.network.v2019_09_01.models.WebApplicationFirewallPolicy]
:ivar next_link: URL to get the next set of WebApplicationFirewallPolicy objects if there are
any.
:vartype next_link: str
"""
_validation = {
'value': {'readonly': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[WebApplicationFirewallPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(WebApplicationFirewallPolicyListResult, self).__init__(**kwargs)
self.value = None
self.next_link = None
| 41.863496
| 540
| 0.671835
|
eecbf9b205495bd8e42f4a34c3cdfbc0a1e502bd
| 18,978
|
py
|
Python
|
src/satori/ars/model.py
|
satoriproject/satori-problems
|
8032725dad5d35b909029e1005b972608bc4b732
|
[
"MIT"
] | null | null | null |
src/satori/ars/model.py
|
satoriproject/satori-problems
|
8032725dad5d35b909029e1005b972608bc4b732
|
[
"MIT"
] | null | null | null |
src/satori/ars/model.py
|
satoriproject/satori-problems
|
8032725dad5d35b909029e1005b972608bc4b732
|
[
"MIT"
] | null | null | null |
# vim:ts=4:sts=4:sw=4:expandtab
"""An in-memory model for an exported service.
"""
import six
from datetime import datetime
from time import mktime
from types import FunctionType
from satori.objects import Argument, DispatchOn
NoneType = type(None)
class ArsElement(object):
"""Abstract. A base class for ARS model elements.
"""
pass
class ArsNamedElement(ArsElement):
"""Abstract. A base class for ARS model elements that have a name.
"""
@Argument('name', type=str)
def __init__(self, name):
super(ArsNamedElement, self).__init__()
self.name = name
def __str__(self):
return self.__class__.__name__ + ':' + self.name
class ArsType(ArsElement):
"""Abstract. A base class for ARS data types.
"""
def __init__(self):
super(ArsType, self).__init__()
self.converter = None
def do_needs_conversion(self):
return False
def do_convert_to_ars(self, value):
return value
def do_convert_from_ars(self, value):
return value
def needs_conversion(self):
if self.converter is not None:
return self.converter.needs_conversion()
else:
return self.do_needs_conversion()
def convert_to_ars(self, value):
if value is None:
return None
if not self.needs_conversion():
return value
if self.converter is not None:
return self.converter.convert_to_ars(value)
else:
return self.do_convert_to_ars(value)
def convert_from_ars(self, value):
if value is None:
return None
if not self.needs_conversion():
return value
if self.converter is not None:
return self.converter.convert_from_ars(value)
else:
return self.do_convert_from_ars(value)
class ArsNamedType(ArsNamedElement, ArsType):
"""Abstract. An ArsType that has a name.
"""
def __init__(self, name):
super(ArsNamedType, self).__init__(name)
class ArsAtomicType(ArsNamedType):
"""An ArsType without (visible) internal structure.
"""
def __init__(self, name):
super(ArsAtomicType, self).__init__(name)
ArsBoolean = ArsAtomicType(name='ArsBoolean')
ArsInt8 = ArsAtomicType(name='ArsInt8')
ArsInt16 = ArsAtomicType(name='ArsInt16')
ArsInt32 = ArsAtomicType(name='ArsInt32')
ArsInt64 = ArsAtomicType(name='ArsInt64')
ArsFloat = ArsAtomicType(name='ArsFloat')
ArsVoid = ArsAtomicType(name='ArsVoid')
ArsBinary = ArsAtomicType(name='ArsBinary')
class ArsStringType(ArsAtomicType):
def __init__(self, name):
super(ArsStringType, self).__init__(name)
def do_needs_conversion(self):
return True
def do_convert_to_ars(self, value):
if isinstance(value, six.text_type):
return value.encode('utf-8')
else:
return value
def do_convert_from_ars(self, value):
if isinstance(value, six.binary_type):
return six.text_type(value, 'utf-8')
else:
return value
ArsString = ArsStringType(name='ArsString')
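# Illustrative sketch (not part of the original module): ArsString encodes text
# to UTF-8 bytes on the way out and decodes back to text on the way in.
def _example_string_roundtrip(text=u'café'):
    encoded = ArsString.convert_to_ars(text)       # text -> UTF-8 bytes
    return ArsString.convert_from_ars(encoded)     # UTF-8 bytes -> text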
class ArsTypeAlias(ArsNamedType):
"""A named alias for an ArsType.
"""
@Argument('target_type', type=ArsType)
def __init__(self, name, target_type):
super(ArsTypeAlias, self).__init__(name)
self.target_type = target_type
def do_needs_conversion(self):
return self.target_type.needs_conversion()
def do_convert_to_ars(self, value):
return self.target_type.convert_to_ars(value)
def do_convert_from_ars(self, value):
return self.target_type.convert_from_ars(value)
class ArsList(ArsType):
"""An ArsType representing a list.
"""
@Argument('element_type', type=ArsType)
def __init__(self, element_type):
super(ArsList, self).__init__()
self.element_type = element_type
def __str__(self):
return 'ArsList<'+str(self.element_type)+'>'
def do_needs_conversion(self):
return self.element_type.needs_conversion()
def do_convert_to_ars(self, value):
return [self.element_type.convert_to_ars(elem) for elem in value]
def do_convert_from_ars(self, value):
return [self.element_type.convert_from_ars(elem) for elem in value]
class ArsSet(ArsType):
"""An ArsType representing set.
"""
@Argument('element_type', type=ArsType)
def __init__(self, element_type):
super(ArsSet, self).__init__()
self.element_type = element_type
def __str__(self):
return 'ArsSet<'+str(self.element_type)+'>'
def do_needs_conversion(self):
return self.element_type.needs_conversion()
def do_convert_to_ars(self, value):
new_value = set()
for elem in value:
new_value.add(self.element_type.convert_to_ars(elem))
return new_value
def do_convert_from_ars(self, value):
new_value = set()
for elem in value:
new_value.add(self.element_type.convert_from_ars(elem))
return new_value
class ArsMap(ArsType):
"""An ArsType representing a key-value mapping.
"""
@Argument('key_type', type=ArsType)
@Argument('value_type', type=ArsType)
def __init__(self, key_type, value_type):
super(ArsMap, self).__init__()
self.key_type = key_type
self.value_type = value_type
def __str__(self):
return 'ArsMap<'+str(self.key_type)+','+str(self.value_type)+'>'
def do_needs_conversion(self):
return self.key_type.needs_conversion() or self.value_type.needs_conversion()
def do_convert_to_ars(self, value):
new_value = dict()
        for (key, elem) in six.iteritems(value):
new_value[self.key_type.convert_to_ars(key)] = self.value_type.convert_to_ars(elem)
return new_value
def do_convert_from_ars(self, value):
new_value = dict()
        for (key, elem) in six.iteritems(value):
new_value[self.key_type.convert_from_ars(key)] = self.value_type.convert_from_ars(elem)
return new_value
class ArsNamedTuple(object):
"""A list of ArsNamedElements that have unique names. Something like an ordered dictionary.
"""
def __init__(self):
super(ArsNamedTuple, self).__init__()
self.names = dict()
self.items = list()
@Argument('item', type=ArsNamedElement)
def append(self, item):
if item.name in self:
if self[item.name] == item:
return
else:
raise ValueError('Duplicate item name')
self.names[item.name] = item
self.items.append(item)
def extend(self, tuple):
items = list()
for item in tuple:
if item.name in self:
if self[item.name] == item:
pass
else:
raise ValueError('Duplicate item name')
else:
items.append(item)
for item in items:
self.append(item)
def __contains__(self, elem):
if isinstance(elem, six.string_types):
return elem in self.names
elif isinstance(elem, ArsNamedElement):
return (elem.name in self.names) and (self.names[elem.name] == elem)
else:
return False
def __iter__(self):
return self.items.__iter__()
def __len__(self):
return self.items.__len__()
def __getitem__(self, index):
if isinstance(index, six.string_types):
return self.names[index]
elif isinstance(index, int):
return self.items[index]
else:
raise TypeError('ArsNamedTuple index should be int or str')
def __str__(self):
return 'ArsNamedTuple[' + ','.join(str(item) for item in self.items) + ']'
class ArsField(ArsNamedElement):
"""A single field of an ArsStructure.
"""
@Argument('type', type=ArsType)
@Argument('optional', type=bool)
def __init__(self, name, type, optional=False): # pylint: disable-msg=W0622
super(ArsField, self).__init__(name)
self.type = type
self.optional = optional
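# Illustrative sketch (not part of the original module): ArsNamedTuple keeps
# insertion order and indexes elements both by position and by name, rejecting
# a second, different element registered under the same name.
def _example_named_tuple():
    fields = ArsNamedTuple()
    fields.append(ArsField(name='id', type=ArsInt64))
    fields.append(ArsField(name='login', type=ArsString, optional=True))
    assert 'login' in fields        # membership by name
    assert fields[0].name == 'id'   # access by position
    return fields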
class ArsStructureBase(object):
def __init__(self, dict_=None, **kwargs):
super(ArsStructureBase, self).__init__()
if dict_:
kwargs.update(dict_)
for field_name in self._ars_type.fields.names:
if field_name in kwargs:
setattr(self, field_name, kwargs.pop(field_name))
else:
setattr(self, field_name, None)
if kwargs:
            raise TypeError('__init__() got an unexpected keyword argument \'{0}\''.format(list(kwargs.keys())[0]))
@classmethod
def ars_type(cls):
return cls._ars_type
def __setitem__(self, key, value):
return setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def __delitem__(self, key):
return delattr(self, key)
def __contains__(self, key):
return hasattr(self, key)
class ArsStructure(ArsNamedType):
"""An ArsType that represents a named structure.
"""
@Argument('base_index', type=int)
def __init__(self, name, base_index=1):
super(ArsStructure, self).__init__(name)
self.fields = ArsNamedTuple()
self.base_index = base_index
@Argument('field', type=ArsField)
def add_field(self, field=None, **kwargs):
if field is None:
field = ArsField(**kwargs)
self.fields.append(field)
def do_needs_conversion(self):
return True
# return any(field.type.needs_conversion() for field in self.fields.items)
def do_convert_to_ars(self, value):
if isinstance(value, dict):
value = self.get_class()(value)
for field in self.fields.items:
if hasattr(value, field.name) and field.type.needs_conversion():
setattr(value, field.name, field.type.convert_to_ars(getattr(value, field.name)))
return value
def do_convert_from_ars(self, value):
for field in self.fields.items:
if hasattr(value, field.name) and field.type.needs_conversion():
setattr(value, field.name, field.type.convert_from_ars(getattr(value, field.name)))
return value
def get_class(self):
if not hasattr(self, '_class'):
self._class = type(self.name, (ArsStructureBase,), {'_ars_type': self})
return self._class
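# Illustrative sketch (not part of the original module): an ArsStructure is both
# a wire-type description and a factory for a Python class carrying those fields.
def _example_structure():
    user = ArsStructure(name='User')
    user.add_field(name='id', type=ArsInt64)
    user.add_field(name='login', type=ArsString, optional=True)
    User = user.get_class()
    instance = User(id=7, login=u'admin')
    return user.convert_to_ars(instance)   # encodes text fields as UTF-8 for transport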
class ArsParameter(ArsField):
"""A single parameter of ArsProcedure.
"""
def __init__(self, name, type, optional=False): # pylint: disable-msg=W0622
super(ArsParameter, self).__init__(name, type, optional)
class ArsProcedure(ArsNamedElement):
"""A procedure that can be remotely called.
"""
@Argument('return_type', type=ArsType)
@Argument('implementation', type=(FunctionType,NoneType))
def __init__(self, name, return_type, implementation=None):
super(ArsProcedure, self).__init__(name)
self.return_type = return_type
self.implementation = implementation
self.parameters = ArsNamedTuple()
self.exception_types = []
self.parameters_struct = ArsStructure(name + '_args')
self.results_struct = ArsStructure(name + '_result', 0)
self.results_struct.add_field(name='result', type=return_type, optional=True)
@Argument('parameter', type=ArsParameter)
def add_parameter(self, parameter=None, **kwargs):
if parameter is None:
parameter = ArsParameter(**kwargs)
self.parameters.append(parameter)
self.parameters_struct.add_field(parameter)
def add_exception(self, exception_type):
self.exception_types.append(exception_type)
self.results_struct.add_field(name='error'+str(len(self.exception_types)), type=exception_type, optional=True)
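# Illustrative sketch (not part of the original module): declaring a procedure
# automatically builds the companion '<name>_args' and '<name>_result'
# structures that carry its parameters and return value.
def _example_procedure():
    login = ArsProcedure(name='User_login', return_type=ArsString)
    login.add_parameter(name='login', type=ArsString)
    login.add_parameter(name='password', type=ArsString)
    return login.parameters_struct, login.results_struct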
class ArsExceptionBase(Exception):
def __init__(self, dict_=None, **kwargs):
super(ArsExceptionBase, self).__init__()
if dict_:
kwargs.update(dict_)
for field_name in self._ars_type.fields.names:
if field_name in kwargs:
setattr(self, field_name, kwargs.pop(field_name))
else:
setattr(self, field_name, None)
if kwargs:
            raise TypeError('__init__() got an unexpected keyword argument \'{0}\''.format(list(kwargs.keys())[0]))
def __str__(self):
if ('message' in self._ars_type.fields) and (self.message is not None):
return self.message
else:
return ', '.join(getattr(self, field_name) for field_name in self._ars_type.fields.names if getattr(self, field_name) is not None)
@classmethod
def ars_type(cls):
return cls._ars_type
class ArsException(ArsStructure):
def get_class(self):
if not hasattr(self, '_class'):
self._class = type(self.name, (ArsExceptionBase,), {'_ars_type': self})
return self._class
class ArsService(ArsNamedElement):
"""A group of ArsProcedures.
"""
def __init__(self, name, base=None):
super(ArsService, self).__init__(name)
self.base = base
self.procedures = ArsNamedTuple()
@Argument('procedure', type=ArsProcedure)
def add_procedure(self, procedure=None, **kwargs):
if procedure is None:
procedure = ArsProcedure(**kwargs)
self.procedures.append(procedure)
class ArsConstant(ArsNamedElement):
"""An element with a constant value.
"""
@Argument('type', type=ArsType)
def __init__(self, name, type, value):
super(ArsConstant, self).__init__(name)
self.type = type
self.value = value
class ArsInterface(ArsElement):
"""A group of ArsNamedTypes, ArsConstants and ArsServices.
"""
def __init__(self):
self.types = ArsNamedTuple()
self.constants = ArsNamedTuple()
self.services = ArsNamedTuple()
def add_type(self, type):
if isinstance(type, ArsAtomicType):
pass
elif isinstance(type, ArsTypeAlias):
if type not in self.types:
self.add_type(type.target_type)
self.types.append(type)
elif isinstance(type, ArsList):
self.add_type(type.element_type)
elif isinstance(type, ArsSet):
self.add_type(type.element_type)
elif isinstance(type, ArsMap):
self.add_type(type.key_type)
self.add_type(type.value_type)
elif isinstance(type, ArsStructure):
if type not in self.types:
for field in type.fields:
self.add_type(field.type)
self.types.append(type)
else:
raise TypeError('Unknown ArsType type: {0}'.format(type.__class__.__name__))
def add_constant(self, constant=None, **kwargs):
if constant is None:
constant = ArsConstant(**kwargs)
self.add_type(constant.type)
self.constants.append(constant)
def add_service(self, service):
if service.base:
self.add_service(service.base)
for procedure in service.procedures:
self.add_type(procedure.return_type)
for parameter in procedure.parameters:
self.add_type(parameter.type)
for exception in procedure.exception_types:
self.add_type(exception)
self.services.append(service)
@DispatchOn(type=ArsAtomicType)
def deepcopy_type(self, type, new_interface):
return type
@DispatchOn(type=ArsNamedType)
def deepcopy_type(self, type, new_interface):
return new_interface.types[type.name]
@DispatchOn(type=ArsList)
def deepcopy_type(self, type, new_interface):
return ArsList(element_type=self.deepcopy_type(type.element_type, new_interface))
@DispatchOn(type=ArsSet)
def deepcopy_type(self, type, new_interface):
return ArsSet(element_type=self.deepcopy_type(type.element_type, new_interface))
@DispatchOn(type=ArsMap)
def deepcopy_type(self, type, new_interface):
return ArsMap(key_type=self.deepcopy_type(type.key_type, new_interface), value_type=self.deepcopy_type(type.value_type, new_interface))
@DispatchOn(type=ArsTypeAlias)
def deepcopy_type_first(self, type, new_interface):
return ArsTypeAlias(name=type.name, target_type=self.deepcopy_type(type.target_type, new_interface))
@DispatchOn(type=ArsStructure)
def deepcopy_type_first(self, type, new_interface):
ret = ArsStructure(name=type.name, base_index=type.base_index)
for field in type.fields:
ret.add_field(name=field.name, type=self.deepcopy_type(field.type, new_interface), optional=field.optional)
return ret
@DispatchOn(type=ArsException)
def deepcopy_type_first(self, type, new_interface):
ret = ArsException(name=type.name, base_index=type.base_index)
for field in type.fields:
ret.add_field(name=field.name, type=self.deepcopy_type(field.type, new_interface), optional=field.optional)
return ret
def deepcopy(self):
ret = ArsInterface()
for type in self.types:
ret.types.append(self.deepcopy_type_first(type, ret))
for constant in self.constants:
            ret.constants.append(ArsConstant(name=constant.name, type=self.deepcopy_type(constant.type, ret), value=constant.value))
for service in self.services:
if service.base:
ret_service = ArsService(name=service.name, base=ret.services[service.base.name])
else:
ret_service = ArsService(name=service.name)
for procedure in service.procedures:
ret_procedure = ArsProcedure(name=procedure.name, return_type=self.deepcopy_type(procedure.return_type, ret), implementation=procedure.implementation)
for parameter in procedure.parameters:
ret_procedure.add_parameter(name=parameter.name, type=self.deepcopy_type(parameter.type, ret), optional=parameter.optional)
for exception in procedure.exception_types:
                    ret_procedure.add_exception(self.deepcopy_type(exception, ret))
ret_service.add_procedure(ret_procedure)
ret.services.append(ret_service)
return ret
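# Illustrative sketch (not part of the original module): add_service() walks the
# procedures and registers every referenced type, so the interface can later be
# serialized or duplicated with deepcopy().
def _example_interface():
    echo_service = ArsService(name='Echo')
    echo = ArsProcedure(name='Echo_echo', return_type=ArsString)
    echo.add_parameter(name='text', type=ArsString)
    echo_service.add_procedure(echo)
    interface = ArsInterface()
    interface.add_service(echo_service)
    return interface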
# additional type specialization
class ArsDateTime(ArsTypeAlias):
def __init__(self, ):
super(ArsDateTime, self).__init__(name='DateTime', target_type=ArsInt64)
def do_needs_conversion(self):
return True
def do_convert_to_ars(self, value):
        return int(mktime(value.timetuple()))
def do_convert_from_ars(self, value):
return datetime.fromtimestamp(value)
ArsDateTime = ArsDateTime()
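# Illustrative sketch (not part of the original module): ArsDateTime ships
# datetime values as integer UNIX timestamps on the wire.
def _example_datetime_roundtrip(moment):
    wire_value = ArsDateTime.convert_to_ars(moment)     # datetime -> timestamp
    return ArsDateTime.convert_from_ars(wire_value)     # timestamp -> datetime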
__all__ = [name for name in globals().keys() if name.startswith('Ars')]
| 31.63
| 166
| 0.646117
|
99c4e9fdfd6ab8a8a17dcd6f17a8a2dcfc9520c9
| 15,612
|
py
|
Python
|
pysnmp-with-texts/SONUS-TC.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 8
|
2019-05-09T17:04:00.000Z
|
2021-06-09T06:50:51.000Z
|
pysnmp-with-texts/SONUS-TC.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 4
|
2019-05-31T16:42:59.000Z
|
2020-01-31T21:57:17.000Z
|
pysnmp-with-texts/SONUS-TC.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module SONUS-TC (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SONUS-TC
# Produced by pysmi-0.3.4 at Wed May 1 15:09:32 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
iso, TimeTicks, Counter32, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity, Integer32, ModuleIdentity, NotificationType, IpAddress, Gauge32, Counter64, Unsigned32, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "TimeTicks", "Counter32", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity", "Integer32", "ModuleIdentity", "NotificationType", "IpAddress", "Gauge32", "Counter64", "Unsigned32", "MibIdentifier")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
sonusModules, = mibBuilder.importSymbols("SONUS-SMI", "sonusModules")
sonusTextualConventions = ModuleIdentity((1, 3, 6, 1, 4, 1, 2879, 3, 1))
if mibBuilder.loadTexts: sonusTextualConventions.setLastUpdated('200107310000Z')
if mibBuilder.loadTexts: sonusTextualConventions.setOrganization('Sonus Networks, Inc.')
if mibBuilder.loadTexts: sonusTextualConventions.setContactInfo(' Sonus Networks, Inc. Customer Service Postal: 5 Carlisle Rd Westford, MA 01886 USA Tel: 978-692-8999 E-mail: cs.snmp@sonusnet.com')
if mibBuilder.loadTexts: sonusTextualConventions.setDescription('This module defines textual conventions used throughout sonus enterprise mibs.')
class HwTypeID(TextualConvention, Integer32):
description = 'Represents the different types of hardware components that are available in a GSX9000 shelf.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50))
namedValues = NamedValues(("none", 1), ("unknown", 14), ("undefined", 15), ("mns10", 16), ("pns10", 17), ("cns10", 18), ("cns30", 19), ("mta01", 20), ("mna10", 21), ("pna10", 22), ("cna33", 23), ("cna10", 24), ("cna30", 25), ("sonicPlane", 26), ("fanTray", 27), ("mta10", 28), ("cna01", 29), ("cna03", 30), ("cns20", 31), ("pns20", 32), ("sps60", 33), ("cna20", 34), ("cna21", 35), ("pna21", 36), ("pna23", 37), ("cns61", 38), ("cna61", 39), ("mta20", 40), ("mta21", 41), ("cna06", 42), ("cns31", 43), ("cns25", 44), ("cna25", 45), ("pns30", 46), ("pna30", 47), ("sonicPlane2", 48), ("cna02", 49), ("cna05", 50))
class ServerTypeID(TextualConvention, Integer32):
description = 'Represents the different types of server modules which are available in a GSX9000 shelf.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(16, 17, 18, 19, 31, 32, 44, 43, 46, 38))
namedValues = NamedValues(("mns10", 16), ("pns10", 17), ("cns10", 18), ("cns30", 19), ("cns20", 31), ("pns20", 32), ("cns25", 44), ("cns31", 43), ("pns30", 46), ("cns61", 38))
class AdapterTypeID(TextualConvention, Integer32):
description = 'Represents the different types of adapter modules which are available in a GSX9000 shelf.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 21, 22, 24, 25, 23, 29, 30, 34, 35, 36, 45, 47, 49, 50, 39, 42))
namedValues = NamedValues(("none", 1), ("mna10", 21), ("pna10", 22), ("cna10", 24), ("cna30", 25), ("cna33", 23), ("cna01", 29), ("cna03", 30), ("cna20", 34), ("cna21", 35), ("pna21", 36), ("cna25", 45), ("pna30", 47), ("cna02", 49), ("cna05", 50), ("cna61", 39), ("cna06", 42))
class ServerFunctionType(TextualConvention, Integer32):
description = 'Represents the logical function of the server/adapter pair'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
namedValues = NamedValues(("default", 1), ("atm", 2), ("mgmt", 3), ("t1", 4), ("e1", 5), ("t3", 6), ("sps", 7), ("oc3tdm", 8), ("pos", 9), ("enet", 10))
class SonusShelfIndex(TextualConvention, Integer32):
description = 'The index for a Sonus shelf.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 6)
class SonusSlotIndex(TextualConvention, Integer32):
description = 'The index for a Sonus slot.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 16)
class SonusEventString(TextualConvention, OctetString):
description = 'This data type is used to model textual information taken from the NVT ASCII character set.'
status = 'current'
displayHint = '511a'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 511)
class SonusEventClass(TextualConvention, Integer32):
description = 'The categories of which events are classified by.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))
namedValues = NamedValues(("sysmgmt", 1), ("callproc", 2), ("resmgmt", 3), ("directory", 4), ("netmgmt", 5), ("signaling", 6), ("routing", 7), ("trace", 8))
class SonusEventLevel(TextualConvention, Integer32):
description = 'The severity level of events.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("critical", 1), ("major", 2), ("minor", 3), ("info", 4))
class SonusEventFilterLevel(TextualConvention, Integer32):
description = 'The filter severity level of events.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))
namedValues = NamedValues(("noevents", 0), ("critical", 1), ("major", 2), ("minor", 3), ("info", 4))
class SonusName(TextualConvention, OctetString):
description = 'This data type is used to model textual information taken from the NVT ASCII character set. The SonusName type is used to label Sonus Named objects.'
status = 'current'
displayHint = '255a'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(1, 23)
class SonusNameReference(TextualConvention, OctetString):
description = 'This data type is used to model textual information taken from the NVT ASCII character set. The SonusNameReference type is used to refer to Sonus Named objects.'
status = 'current'
displayHint = '255a'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 23)
class SonusBoolean(TextualConvention, Integer32):
description = 'This data type is used to model a boolean value expressed as true or false.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("false", 1), ("true", 2))
class PointCode(TextualConvention, Integer32):
description = 'A three octet data type representing the SS7 PointCode. The octets represent Network-Cluster-Member values respectively.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 16777215)
class SonusSysId(TextualConvention, Integer32):
description = 'An INTEGER representing the sub-system identifiers for software components in a GSX9000 node.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63))
namedValues = NamedValues(("evlog", 1), ("ncm", 2), ("nrs", 3), ("fm", 4), ("sm", 5), ("sma", 6), ("nrm", 7), ("nrma", 8), ("brm", 9), ("drm", 10), ("prm", 11), ("xrm", 12), ("cc", 13), ("icm", 14), ("ipdh", 15), ("epdh", 16), ("ds", 17), ("debug", 18), ("trm", 19), ("sg", 20), ("gwfe", 21), ("gwsg", 22), ("sg-7", 23), ("rtcp", 24), ("tccs", 25), ("cli", 26), ("snmp", 27), ("hsim", 28), ("lesim", 29), ("ss7fe", 30), ("led", 31), ("param", 32), ("cb", 33), ("acm", 34), ("ntp", 35), ("diag", 36), ("cam", 37), ("asg", 38), ("mgsg", 39), ("ipdcfe", 40), ("ltt", 41), ("tm", 42), ("stm", 43), ("sta", 44), ("nfs", 45), ("isdn", 46), ("enm", 47), ("arm", 48), ("arma", 49), ("rtm", 50), ("psdh", 51), ("atmrm", 52), ("dcl", 53), ("psd", 54), ("cassg", 55), ("pfa", 56), ("mgcpfe", 57), ("spm", 58), ("spma", 59), ("h323sg", 60), ("h323fe", 61), ("sipsg", 62), ("sipfe", 63))
class SonusServiceState(TextualConvention, Integer32):
description = 'The service state of a resource.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("outOfService", 1), ("inService", 2))
class SonusAdminState(TextualConvention, Integer32):
description = 'The administrative state of a resource.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("disabled", 1), ("enabled", 2))
class SonusAdminAction(TextualConvention, Integer32):
description = 'The action assignment of a resource.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("dryUp", 1), ("force", 2))
class SonusCircuitState(TextualConvention, Integer32):
description = 'The state of an ISUP circuit.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
namedValues = NamedValues(("unblocked", 1), ("blocked", 2), ("transientUnblock", 3), ("transientBlock", 4), ("notAvailable", 5))
class SonusMtaSlotIndex(TextualConvention, Integer32):
description = 'The index identifying an MTA slot'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("mta1", 1), ("mta2", 2))
class SonusTimingSource(TextualConvention, Integer32):
    description = 'Timing source types. extClkA and extClkB specify either BITS or SETS clock inputs, depending on whether the MTA is an MTA10(BITS) or MTA20(SETS). refClkA and refClkB indicate a clock recovered/derived from a DS1 (for instance)'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))
namedValues = NamedValues(("extClkA", 1), ("extClkB", 2), ("refClkA", 3), ("refClkB", 4), ("oscillator", 5), ("holdover", 6))
class SonusSoftwareVersion(TextualConvention, OctetString):
description = 'Octet string that identifies the version of the runtime software application: Byte(s) Code ------- ---- 0 major version 1 minor version 2 release version 3 type (1:alpha, 2:beta, 3:release, 4:special) 4-5 type number'
status = 'current'
displayHint = '1d.1d.1d.1d.2d'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
fixedLength = 6
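# --- Illustrative helper (added for clarity; not part of the generated MIB) ---
# A minimal sketch of how a SonusSoftwareVersion value could be decoded, based
# on the byte layout documented above and the '1d.1d.1d.1d.2d' display hint.
# The function name is hypothetical and unused elsewhere in this module.
def _example_decode_software_version(octets):
    b = bytearray(octets)  # accepts bytes or an OctetString payload
    major, minor, release, build_type = b[0], b[1], b[2], b[3]
    type_number = (b[4] << 8) | b[5]  # the last two bytes form one decimal field
    return '%d.%d.%d.%d.%d' % (major, minor, release, build_type, type_number)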
class SonusSystemName(TextualConvention, OctetString):
description = 'This data type refers to the name of networking device.'
status = 'current'
displayHint = '255a'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 255)
class SonusTrapType(TextualConvention, Integer32):
description = 'The type of trap PDU to be generated: none - no PDU is generated trapv1 - SNMPv1-Trap-PDU trapv2 - SNMPv2-Trap-PDU inform - InformRequestPDU'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))
namedValues = NamedValues(("none", 0), ("trapv1", 1), ("trapv2", 2), ("inform", 3))
class SonusAccessLevel(TextualConvention, Integer32):
description = 'The User and Mgmt Client access level.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("readOnly", 1), ("readWrite", 2), ("admin", 3))
class SonusPointCode(TextualConvention, OctetString):
description = 'Octet string that represents a point code. A point code may be comprised of one or more sub-values. Each sub-value is stored in two bytes of the octet string. The first two bytes of the octet string contains the count of the two-byte sub-values that comprise the point code. The remaining bytes of the octet string contain the two-byte sub-values. For example: Point Code Octet String ---------- ------------ 4-200-7 00 04 00 C8 00 07 2-1000-3 00 02 03 E8 00 08 15-750 00 FF 02 EE 16000 3E 80'
status = 'current'
displayHint = '2d-'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(1, 64)
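# --- Illustrative helper (added for clarity; not part of the generated MIB) ---
# A minimal sketch of building a SonusPointCode octet string from a value such
# as '4-200-7', following the prose description above (a two-byte count of
# sub-values followed by one two-byte field per sub-value). The function name
# is hypothetical; under this reading '4-200-7' encodes as 00 03 00 04 00 C8 00 07.
def _example_encode_point_code(text):
    parts = [int(p) for p in text.split('-')]
    out = bytearray()
    out.extend(divmod(len(parts), 256))   # two-byte sub-value count, big-endian
    for value in parts:
        out.extend(divmod(value, 256))    # each sub-value as two bytes, big-endian
    return bytes(out)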
class SonusPointCodeFormat(TextualConvention, OctetString):
description = 'Octet string that represents a point code format, such as 2-10-2, 3-8-3, 4-10 or 14. A point code format specifies the number of bits that each sub-value of a point code value is stored in. The total number of bits should be 14. A point code format is used to specify how a point code value should be stored in a 14 bit value. A point code format may be comprised of one or more numbers. Each number is stored in one byte of the octet string. The first byte of the octet string contains the count of the one-byte numbers that comprise the point code format. The remaining bytes of the octet string contain the one-byte numbers. For example: Point Code Format Octet String ----------------- ------------ 3-8-3 03 08 03 2-10-2 02 0A 02 4-10 04 0A 14 0E'
status = 'current'
displayHint = '1d-'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(1, 32)
class SonusSupportFlag(TextualConvention, Integer32):
description = 'The flag to indicate if the attribute is supported.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("unsupported", 1), ("supported", 2))
mibBuilder.exportSymbols("SONUS-TC", SonusAdminAction=SonusAdminAction, ServerFunctionType=ServerFunctionType, SonusSoftwareVersion=SonusSoftwareVersion, SonusShelfIndex=SonusShelfIndex, SonusSlotIndex=SonusSlotIndex, SonusAdminState=SonusAdminState, SonusEventString=SonusEventString, SonusEventClass=SonusEventClass, SonusServiceState=SonusServiceState, HwTypeID=HwTypeID, SonusNameReference=SonusNameReference, AdapterTypeID=AdapterTypeID, SonusName=SonusName, PYSNMP_MODULE_ID=sonusTextualConventions, sonusTextualConventions=sonusTextualConventions, SonusPointCode=SonusPointCode, SonusSupportFlag=SonusSupportFlag, SonusTimingSource=SonusTimingSource, SonusBoolean=SonusBoolean, SonusAccessLevel=SonusAccessLevel, SonusEventLevel=SonusEventLevel, PointCode=PointCode, SonusPointCodeFormat=SonusPointCodeFormat, SonusMtaSlotIndex=SonusMtaSlotIndex, SonusCircuitState=SonusCircuitState, SonusEventFilterLevel=SonusEventFilterLevel, SonusSysId=SonusSysId, ServerTypeID=ServerTypeID, SonusSystemName=SonusSystemName, SonusTrapType=SonusTrapType)
| 83.486631
| 1,047
| 0.710735
|
a45240a5828aca8da6cbc334e818fc9914b61ea0
| 5,334
|
py
|
Python
|
src/lib/datasets/dataset/coco.py
|
ThanhDinhDat/center_net_points
|
7747dfd6cf0c2ac4c54f3f121106346924e20e37
|
[
"MIT"
] | null | null | null |
src/lib/datasets/dataset/coco.py
|
ThanhDinhDat/center_net_points
|
7747dfd6cf0c2ac4c54f3f121106346924e20e37
|
[
"MIT"
] | null | null | null |
src/lib/datasets/dataset/coco.py
|
ThanhDinhDat/center_net_points
|
7747dfd6cf0c2ac4c54f3f121106346924e20e37
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pycocotools.coco as coco
from pycocotools.cocoeval import COCOeval
import numpy as np
import json
import os
import torch.utils.data as data
class COCO(data.Dataset):
num_classes = 80
default_resolution = [512, 512]
mean = np.array([0.40789654, 0.44719302, 0.47026115],
dtype=np.float32).reshape(1, 1, 3)
std = np.array([0.28863828, 0.27408164, 0.27809835],
dtype=np.float32).reshape(1, 1, 3)
def __init__(self, opt, split):
super(COCO, self).__init__()
self.data_dir = os.path.join(opt.data_dir, 'coco')
self.img_dir = os.path.join(self.data_dir, '{}2014'.format(split))
if split == 'test':
self.annot_path = os.path.join(
self.data_dir, 'annotations',
        'instances_{}2014.json').format(split)
else:
if opt.task == 'exdet':
self.annot_path = os.path.join(
self.data_dir, 'annotations',
          'instances_{}2014.json').format(split)
else:
self.annot_path = os.path.join(
self.data_dir, 'annotations',
'instances_{}2014.json').format(split)
self.max_objs = 128
# self.class_name = [
# '__background__', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
# 'bus', 'train', 'truck', 'boat', 'traffic light', 'fire hydrant',
# 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse',
# 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack',
# 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',
# 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',
# 'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',
# 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',
# 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',
# 'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
# 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
# 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
# 'scissors', 'teddy bear', 'hair drier', 'toothbrush']
# self._valid_ids = [
# 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
# 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
# 24, 25, 27, 28, 31, 32, 33, 34, 35, 36,
# 37, 38, 39, 40, 41, 42, 43, 44, 46, 47,
# 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
# 58, 59, 60, 61, 62, 63, 64, 65, 67, 70,
# 72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
# 82, 84, 85, 86, 87, 88, 89, 90]
self.class_name = [
'__background__', 'person']
self._valid_ids = [
1]
self.cat_ids = {v: i for i, v in enumerate(self._valid_ids)}
self.voc_color = [(v // 32 * 64 + 64, (v // 8) % 4 * 64, v % 8 * 32) \
for v in range(1, self.num_classes + 1)]
self._data_rng = np.random.RandomState(123)
self._eig_val = np.array([0.2141788, 0.01817699, 0.00341571],
dtype=np.float32)
self._eig_vec = np.array([
[-0.58752847, -0.69563484, 0.41340352],
[-0.5832747, 0.00994535, -0.81221408],
[-0.56089297, 0.71832671, 0.41158938]
], dtype=np.float32)
# self.mean = np.array([0.485, 0.456, 0.406], np.float32).reshape(1, 1, 3)
# self.std = np.array([0.229, 0.224, 0.225], np.float32).reshape(1, 1, 3)
self.split = split
self.opt = opt
    print('==> initializing coco 2014 {} data.'.format(split))
self.coco = coco.COCO(self.annot_path)
self.images = self.coco.getImgIds()
self.num_samples = len(self.images)
print('Loaded {} {} samples'.format(split, self.num_samples))
def _to_float(self, x):
return float("{:.2f}".format(x))
def convert_eval_format(self, all_bboxes):
# import pdb; pdb.set_trace()
detections = []
for image_id in all_bboxes:
for cls_ind in all_bboxes[image_id]:
category_id = self._valid_ids[cls_ind - 1]
for bbox in all_bboxes[image_id][cls_ind]:
bbox[2] -= bbox[0]
bbox[3] -= bbox[1]
score = bbox[4]
bbox_out = list(map(self._to_float, bbox[0:4]))
detection = {
"image_id": int(image_id),
"category_id": int(category_id),
"bbox": bbox_out,
"score": float("{:.2f}".format(score))
}
if len(bbox) > 5:
extreme_points = list(map(self._to_float, bbox[5:13]))
detection["extreme_points"] = extreme_points
detections.append(detection)
return detections
def __len__(self):
return self.num_samples
def save_results(self, results, save_dir):
json.dump(self.convert_eval_format(results),
open('{}/results.json'.format(save_dir), 'w'))
def run_eval(self, results, save_dir):
# result_json = os.path.join(save_dir, "results.json")
# detections = self.convert_eval_format(results)
# json.dump(detections, open(result_json, "w"))
self.save_results(results, save_dir)
coco_dets = self.coco.loadRes('{}/results.json'.format(save_dir))
coco_eval = COCOeval(self.coco, coco_dets, "bbox")
coco_eval.evaluate()
coco_eval.accumulate()
coco_eval.summarize()
| 39.80597
| 80
| 0.584927
|
7bbc7955d75aedc7027b7738eb3bc05cec8c1465
| 1,270
|
py
|
Python
|
run.py
|
dogoncouch/dhcptranslate
|
ed0387c60da14bf6bf5fee79270affbea909db92
|
[
"MIT"
] | null | null | null |
run.py
|
dogoncouch/dhcptranslate
|
ed0387c60da14bf6bf5fee79270affbea909db92
|
[
"MIT"
] | null | null | null |
run.py
|
dogoncouch/dhcptranslate
|
ed0387c60da14bf6bf5fee79270affbea909db92
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# MIT License
#
# Copyright (c) 2020 Dan Persons <dogoncouch@dogoncouch.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from dhcptranslate.core import DHCPTranslateCore
translator = DHCPTranslateCore()
translator.run_script()
| 43.793103
| 80
| 0.779528
|
219c81861a04a4aebb55fa03808752301c9f728c
| 5,239
|
py
|
Python
|
google/ads/google_ads/v6/proto/errors/bidding_strategy_error_pb2.py
|
jphanwebstaurant/google-ads-python
|
600812b2afcc4d57f00b47dfe436620ce50bfe9b
|
[
"Apache-2.0"
] | 1
|
2021-04-09T04:28:47.000Z
|
2021-04-09T04:28:47.000Z
|
google/ads/google_ads/v6/proto/errors/bidding_strategy_error_pb2.py
|
jphanwebstaurant/google-ads-python
|
600812b2afcc4d57f00b47dfe436620ce50bfe9b
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v6/proto/errors/bidding_strategy_error_pb2.py
|
jphanwebstaurant/google-ads-python
|
600812b2afcc4d57f00b47dfe436620ce50bfe9b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v6/proto/errors/bidding_strategy_error.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v6/proto/errors/bidding_strategy_error.proto',
package='google.ads.googleads.v6.errors',
syntax='proto3',
serialized_options=b'\n\"com.google.ads.googleads.v6.errorsB\031BiddingStrategyErrorProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v6/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V6.Errors\312\002\036Google\\Ads\\GoogleAds\\V6\\Errors\352\002\"Google::Ads::GoogleAds::V6::Errors',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nAgoogle/ads/googleads_v6/proto/errors/bidding_strategy_error.proto\x12\x1egoogle.ads.googleads.v6.errors\x1a\x1cgoogle/api/annotations.proto\"\x9b\x02\n\x18\x42iddingStrategyErrorEnum\"\xfe\x01\n\x14\x42iddingStrategyError\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12\x12\n\x0e\x44UPLICATE_NAME\x10\x02\x12\'\n#CANNOT_CHANGE_BIDDING_STRATEGY_TYPE\x10\x03\x12%\n!CANNOT_REMOVE_ASSOCIATED_STRATEGY\x10\x04\x12\"\n\x1e\x42IDDING_STRATEGY_NOT_SUPPORTED\x10\x05\x12@\n<INCOMPATIBLE_BIDDING_STRATEGY_AND_BIDDING_STRATEGY_GOAL_TYPE\x10\x06\x42\xf4\x01\n\"com.google.ads.googleads.v6.errorsB\x19\x42iddingStrategyErrorProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v6/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V6.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V6\\Errors\xea\x02\"Google::Ads::GoogleAds::V6::Errorsb\x06proto3'
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_BIDDINGSTRATEGYERRORENUM_BIDDINGSTRATEGYERROR = _descriptor.EnumDescriptor(
name='BiddingStrategyError',
full_name='google.ads.googleads.v6.errors.BiddingStrategyErrorEnum.BiddingStrategyError',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DUPLICATE_NAME', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CANNOT_CHANGE_BIDDING_STRATEGY_TYPE', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CANNOT_REMOVE_ASSOCIATED_STRATEGY', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BIDDING_STRATEGY_NOT_SUPPORTED', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INCOMPATIBLE_BIDDING_STRATEGY_AND_BIDDING_STRATEGY_GOAL_TYPE', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=161,
serialized_end=415,
)
_sym_db.RegisterEnumDescriptor(_BIDDINGSTRATEGYERRORENUM_BIDDINGSTRATEGYERROR)
_BIDDINGSTRATEGYERRORENUM = _descriptor.Descriptor(
name='BiddingStrategyErrorEnum',
full_name='google.ads.googleads.v6.errors.BiddingStrategyErrorEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_BIDDINGSTRATEGYERRORENUM_BIDDINGSTRATEGYERROR,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=132,
serialized_end=415,
)
_BIDDINGSTRATEGYERRORENUM_BIDDINGSTRATEGYERROR.containing_type = _BIDDINGSTRATEGYERRORENUM
DESCRIPTOR.message_types_by_name['BiddingStrategyErrorEnum'] = _BIDDINGSTRATEGYERRORENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
BiddingStrategyErrorEnum = _reflection.GeneratedProtocolMessageType('BiddingStrategyErrorEnum', (_message.Message,), {
'DESCRIPTOR' : _BIDDINGSTRATEGYERRORENUM,
'__module__' : 'google.ads.googleads_v6.proto.errors.bidding_strategy_error_pb2'
,
'__doc__': """Container for enum describing possible bidding strategy errors.""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v6.errors.BiddingStrategyErrorEnum)
})
_sym_db.RegisterMessage(BiddingStrategyErrorEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
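# Usage sketch (illustrative; not part of the generated file): the nested enum
# values are exposed on the generated message class, e.g.
#   BiddingStrategyErrorEnum.DUPLICATE_NAME                 # == 2
#   BiddingStrategyErrorEnum.BiddingStrategyError.Name(2)   # == 'DUPLICATE_NAME'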
| 43.297521
| 890
| 0.796908
|
5303810fcc40728aed2d5e8004b6a19a7ade75fd
| 4,333
|
py
|
Python
|
contrib/seeds/generate-seeds.py
|
devronkim/kumsl
|
a70f068020f977ac2c9b55e121593e5631a3abc4
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
devronkim/kumsl
|
a70f068020f977ac2c9b55e121593e5631a3abc4
|
[
"MIT"
] | null | null | null |
contrib/seeds/generate-seeds.py
|
devronkim/kumsl
|
a70f068020f977ac2c9b55e121593e5631a3abc4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
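# Illustrative examples (added for clarity; not in the original script) of what
# name_to_ipv6() returns -- always a 16-byte value in IPv6 form:
#   name_to_ipv6('1.2.3.4')    -> pchIPv4 + bytearray([1, 2, 3, 4])    (IPv4-mapped)
#   name_to_ipv6('::1')        -> bytearray(15 zero bytes + [1])
#   name_to_ipv6('0x0100007f') -> pchIPv4 + bytearray([127, 0, 0, 1])  (little-endian IPv4)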
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
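# Illustrative examples (added for clarity) of parse_spec() with a default port
# of 9999:
#   parse_spec('1.2.3.4', 9999)            -> (<16-byte address>, 9999)
#   parse_spec('[2001:db8::1]:8333', 9999) -> (<16-byte address>, 8333)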
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef KUMSL_CHAINPARAMSSEEDS_H\n')
g.write('#define KUMSL_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the kumsl network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 9999)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 19999)
g.write('#endif // KUMSL_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
| 31.172662
| 98
| 0.578814
|
aef78cc82e5e411ffc72a597cb23dad86b12c247
| 1,128
|
py
|
Python
|
manage.py
|
nmaekawa/hxmirador
|
c5e364a92c3631126a7fd9335af506270f52fe68
|
[
"BSD-3-Clause"
] | null | null | null |
manage.py
|
nmaekawa/hxmirador
|
c5e364a92c3631126a7fd9335af506270f52fe68
|
[
"BSD-3-Clause"
] | null | null | null |
manage.py
|
nmaekawa/hxmirador
|
c5e364a92c3631126a7fd9335af506270f52fe68
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
from dotenv import load_dotenv
if __name__ == "__main__":
# if dotenv file, load it
dotenv_path = os.environ.get("HXMIRADOR_DOTENV_PATH", None)
if dotenv_path:
load_dotenv(dotenv_path)
# define settings if not in environment, default is "dev"
if os.environ.get("DJANGO_SETTINGS_MODULE", None) is None:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hxmirador.settings.dev")
try:
from django.core.management import execute_from_command_line
except ImportError:
        # The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2
try:
import django # noqa
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| 33.176471
| 81
| 0.658688
|
327a37a67a58b314caa95c02379bd85e44d7216f
| 722
|
py
|
Python
|
src/api/v1/villains/serializers.py
|
reiniervdwindt/power-ranger-api
|
13ce639a7f5e9d4b106ce5f094c076db0aad398e
|
[
"MIT"
] | null | null | null |
src/api/v1/villains/serializers.py
|
reiniervdwindt/power-ranger-api
|
13ce639a7f5e9d4b106ce5f094c076db0aad398e
|
[
"MIT"
] | null | null | null |
src/api/v1/villains/serializers.py
|
reiniervdwindt/power-ranger-api
|
13ce639a7f5e9d4b106ce5f094c076db0aad398e
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from series.models import Series
from villains.models import Villain
class VillainSeriesSerializer(serializers.ModelSerializer):
name = serializers.CharField()
class Meta(object):
fields = ('id', 'name',)
model = Series
class VillainDetailSerializer(serializers.ModelSerializer):
series = VillainSeriesSerializer(many=True)
class Meta(object):
fields = ('id', 'name', 'description', 'gender', 'type', 'homeworld', 'series',)
model = Villain
class VillainListSerializer(serializers.ModelSerializer):
class Meta(object):
fields = ('id', 'name', 'description', 'gender', 'type', 'homeworld',)
model = Villain
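# Usage sketch (illustrative; not part of the original module):
#   queryset = Villain.objects.all()
#   data = VillainListSerializer(queryset, many=True).data
#   # -> a list of dicts containing the fields declared above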
| 26.740741
| 88
| 0.688366
|
a80633daca594c22ec7816b28ccb468e5fa2e1de
| 174
|
py
|
Python
|
configs/train_classifier/r50_nih/r50_nih_cbfocal_50e.py
|
CAMP-eXplain-AI/imba-explain
|
e41b4ca5de63955cb0e925aad9599f38c5a3e973
|
[
"MIT"
] | null | null | null |
configs/train_classifier/r50_nih/r50_nih_cbfocal_50e.py
|
CAMP-eXplain-AI/imba-explain
|
e41b4ca5de63955cb0e925aad9599f38c5a3e973
|
[
"MIT"
] | null | null | null |
configs/train_classifier/r50_nih/r50_nih_cbfocal_50e.py
|
CAMP-eXplain-AI/imba-explain
|
e41b4ca5de63955cb0e925aad9599f38c5a3e973
|
[
"MIT"
] | null | null | null |
_base_ = [
'../../_base_/classifiers/resnet50.py', '../../_base_/datasets/nih_class_dataset.py',
'../../_base_/losses/cb_focal.py', '../../_base_/schedules/50e.py'
]
| 34.8
| 89
| 0.62069
|
0d82466edda8a90865c6d189e8dfd682779c1300
| 661
|
py
|
Python
|
contrib/performance/simanalysis/__init__.py
|
backwardn/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
[
"Apache-2.0"
] | 462
|
2016-08-14T17:43:24.000Z
|
2022-03-17T07:38:16.000Z
|
contrib/performance/simanalysis/__init__.py
|
backwardn/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
[
"Apache-2.0"
] | 72
|
2016-09-01T23:19:35.000Z
|
2020-02-05T02:09:26.000Z
|
contrib/performance/simanalysis/__init__.py
|
backwardn/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
[
"Apache-2.0"
] | 171
|
2016-08-16T03:50:30.000Z
|
2022-03-26T11:49:55.000Z
|
##
# Copyright (c) 2015-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Tools to manage sim runs and associated data.
"""
| 33.05
| 74
| 0.750378
|
79495bce3c41b8e34fdaa67622cac6da4443a55f
| 211
|
py
|
Python
|
Recursion/CodingNinjas/15_replace_pi.py
|
sounak95/100_days_of_code
|
50fbf088ce6ab2137aa216a30e3b3f828b278a22
|
[
"Apache-2.0"
] | null | null | null |
Recursion/CodingNinjas/15_replace_pi.py
|
sounak95/100_days_of_code
|
50fbf088ce6ab2137aa216a30e3b3f828b278a22
|
[
"Apache-2.0"
] | null | null | null |
Recursion/CodingNinjas/15_replace_pi.py
|
sounak95/100_days_of_code
|
50fbf088ce6ab2137aa216a30e3b3f828b278a22
|
[
"Apache-2.0"
] | null | null | null |
def replacePi(s):
if len(s)==1 or len(s)==0:
return s
if s.startswith("pi"):
return "3.14" + replacePi(s[2:])
else:
return s[0]+ replacePi(s[1:])
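# Example trace (added for clarity):
#   replacePi("pippi")
#     -> "3.14" + replacePi("ppi")
#     -> "3.14" + "p" + replacePi("pi")
#     -> "3.14" + "p" + "3.14" + replacePi("")
#     -> "3.14p3.14"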
print(replacePi("pippi"))
| 17.583333
| 40
| 0.526066
|
99a67b669ed2b13f52104b295d264a4c8a98e4fd
| 152
|
py
|
Python
|
dian/photo/admin.py
|
deggs7/dian-server
|
7157831b50cb246a36fc7cd971e346e21747aafc
|
[
"MIT"
] | null | null | null |
dian/photo/admin.py
|
deggs7/dian-server
|
7157831b50cb246a36fc7cd971e346e21747aafc
|
[
"MIT"
] | 3
|
2016-10-04T09:57:44.000Z
|
2016-10-04T09:58:22.000Z
|
dian/photo/admin.py
|
diankuai/dian-server
|
7157831b50cb246a36fc7cd971e346e21747aafc
|
[
"MIT"
] | null | null | null |
#! -*- encoding:utf-8 -*-
from django.contrib import admin
from photo.models import Photo, Tag
admin.site.register(Photo)
admin.site.register(Tag)
| 13.818182
| 35
| 0.730263
|
0901566ed052a56841b7cbfe527ab060bc0d62be
| 5,949
|
py
|
Python
|
autotimer/src/AutoTimerPreview.py
|
TwolDE2/enigma2-plugins
|
06685a5ce6a65a8724d3b32c8f7906714650ca2c
|
[
"OLDAP-2.3"
] | 30
|
2015-05-08T22:10:00.000Z
|
2022-03-13T22:09:31.000Z
|
autotimer/src/AutoTimerPreview.py
|
TwolDE2/enigma2-plugins
|
06685a5ce6a65a8724d3b32c8f7906714650ca2c
|
[
"OLDAP-2.3"
] | 124
|
2015-04-27T21:30:48.000Z
|
2022-03-29T10:21:39.000Z
|
autotimer/src/AutoTimerPreview.py
|
TwolDE2/enigma2-plugins
|
06685a5ce6a65a8724d3b32c8f7906714650ca2c
|
[
"OLDAP-2.3"
] | 193
|
2015-01-10T09:21:26.000Z
|
2022-03-21T08:19:33.000Z
|
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
# for localized messages
from . import _, removeBad
import six
# GUI (Screens)
from Screens.Screen import Screen
# GUI (Components)
from Components.ActionMap import ActionMap
from Components.Sources.List import List
from Components.Sources.StaticText import StaticText
from ServiceReference import ServiceReference
from Tools.FuzzyDate import FuzzyTime
from enigma import getDesktop
HD = False
if getDesktop(0).size().width() >= 1280:
HD = True
class AutoTimerPreview(Screen):
"""Preview Timers which would be set"""
if HD:
skin = """<screen name="AutoTimerPreview" title="Preview AutoTimer" position="center,center" size="680,480">
<ePixmap position="20,0" size="140,40" pixmap="skin_default/buttons/red.png" transparent="1" alphatest="on" />
<ePixmap position="180,0" size="140,40" pixmap="skin_default/buttons/green.png" transparent="1" alphatest="on" />
<ePixmap position="340,0" size="140,40" pixmap="skin_default/buttons/yellow.png" transparent="1" alphatest="on" />
<ePixmap position="500,0" size="140,40" pixmap="skin_default/buttons/blue.png" transparent="1" alphatest="on" />
<widget source="key_red" render="Label" position="20,0" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;18" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_yellow" render="Label" position="340,0" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;18" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" />
<widget source="timerlist" render="Listbox" position="5,45" size="670,420" scrollbarMode="showAlways">
<convert type="TemplatedMultiContent">
{"template": [
MultiContentEntryText(pos=(2,2), size=(650,24), text = 3, font = 0, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER),
MultiContentEntryText(pos=(2,26), size=(655,30), text = 0, font = 1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER),
MultiContentEntryText(pos=(2,50), size=(500,20), text = 4, font = 1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER),
MultiContentEntryText(pos=(290,50), size=(333,20), text = 2, font = 1, flags = RT_HALIGN_RIGHT|RT_VALIGN_CENTER),
],
"fonts": [gFont("Regular", 20),gFont("Regular", 18)],
"itemHeight": 72
}
</convert>
</widget>
</screen>"""
else:
skin = """<screen name="AutoTimerPreview" title="Preview AutoTimer" position="center,center" size="565,280">
<ePixmap position="0,0" size="140,40" pixmap="skin_default/buttons/red.png" transparent="1" alphatest="on" />
<ePixmap position="140,0" size="140,40" pixmap="skin_default/buttons/green.png" transparent="1" alphatest="on" />
<ePixmap position="280,0" size="140,40" pixmap="skin_default/buttons/yellow.png" transparent="1" alphatest="on" />
<ePixmap position="420,0" size="140,40" pixmap="skin_default/buttons/blue.png" transparent="1" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" valign="center" halign="center" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" shadowOffset="-1,-1" />
<widget source="timerlist" render="Listbox" position="5,45" size="555,220" scrollbarMode="showAlways">
<convert type="TemplatedMultiContent">
{"template": [
MultiContentEntryText(pos=(2,2), size=(550,24), text = 3, font = 0, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER),
MultiContentEntryText(pos=(2,26), size=(555,30), text = 0, font = 1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER),
MultiContentEntryText(pos=(2,50), size=(400,20), text = 4, font = 1, flags = RT_HALIGN_LEFT|RT_VALIGN_CENTER),
MultiContentEntryText(pos=(290,50), size=(233,20), text = 2, font = 1, flags = RT_HALIGN_RIGHT|RT_VALIGN_CENTER),
],
"fonts": [gFont("Regular", 20),gFont("Regular", 18)],
"itemHeight": 72
}
</convert>
</widget>
</screen>"""
def __init__(self, session, timers):
Screen.__init__(self, session)
# Sort timers by begin
timers.sort(key=lambda x: x[1])
self.sort_type = 0
# name, begin, end, serviceref, timername -> name, begin, timername, sname, timestr
self.timers = []
for x in timers:
serviceref = removeBad(ServiceReference(x[3]).getServiceName())
if six.PY2:
serviceref = serviceref.encode('utf-8', 'ignore')
self.timers.append(
(x[0], x[1], x[4],
serviceref,
(("%s, %s ... %s (%d " + _("mins") + ")") % (FuzzyTime(x[1]) + FuzzyTime(x[2])[1:] + ((x[2] - x[1]) / 60,))))
)
self["timerlist"] = List(self.timers)
# Initialize Buttons
self["key_red"] = StaticText(_("Cancel"))
self["key_yellow"] = StaticText()
self.setSortDescription()
# Define Actions
self["actions"] = ActionMap(["SetupActions", "ColorActions"],
{
"cancel": self.cancel,
"save": self.save,
"yellow": self.sort
}
)
self.onLayoutFinish.append(self.setCustomTitle)
def setCustomTitle(self):
self.setTitle(_("Preview AutoTimer"))
def setSortDescription(self):
if self.sort_type == 1:
self["key_yellow"].text = _("Sort Time")
else:
self["key_yellow"].text = _("Sort AutoTimer")
def sort(self):
timers = self.timers
if timers:
current = self["timerlist"].current
idx = 0
for timer in timers:
if timer == current:
break
idx += 1
if self.sort_type == 1:
timers.sort(key=lambda x: x[1])
self.sort_type = 0
else:
timers.sort(key=lambda x: x[4].lower())
self.sort_type = 1
self["timerlist"].updateList(timers)
self["timerlist"].index = idx
self.setSortDescription()
def cancel(self):
self.close(None)
def save(self):
self.close(True)
| 42.191489
| 224
| 0.680114
|
6ee4e441af4f665f9cbac2bb257f74763ad52e0f
| 800
|
py
|
Python
|
problems/first_unique_character.py
|
smartdolphin/recommandation-tutorial
|
3bfa8f91a6d2d064db42dfb61c3640e1775e4c31
|
[
"MIT"
] | 1
|
2018-10-14T14:19:05.000Z
|
2018-10-14T14:19:05.000Z
|
problems/first_unique_character.py
|
smartdolphin/recommandation-tutorial
|
3bfa8f91a6d2d064db42dfb61c3640e1775e4c31
|
[
"MIT"
] | null | null | null |
problems/first_unique_character.py
|
smartdolphin/recommandation-tutorial
|
3bfa8f91a6d2d064db42dfb61c3640e1775e4c31
|
[
"MIT"
] | null | null | null |
# 387. First Unique Character in a String
# https://leetcode.com/problems/first-unique-character-in-a-string
import unittest
class Solution:
def firstUniqChar(self, s):
"""
:type s: str
:rtype: int
"""
if len(s) == 0:
return -1
dic = {}
for c in s:
if c not in dic:
dic[c] = 1
else:
dic[c] += 1
for i, c in enumerate(s):
if dic[c] == 1:
return i
return -1
class TestFirstUniqueChar(unittest.TestCase):
def test(self):
sol = Solution()
self.assertEqual(sol.firstUniqChar('leetcode'), 0)
self.assertEqual(sol.firstUniqChar('loveleetcode'), 2)
if __name__ == '__main__':
    unittest.main()
| 21.621622
| 66
| 0.52
|
1eaeebc90cfcb6754cf3501550795b566835f416
| 6,282
|
py
|
Python
|
python3-virtualenv/Lib/python3.6/site-packages/mako/filters.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | null | null | null |
python3-virtualenv/Lib/python3.6/site-packages/mako/filters.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | null | null | null |
python3-virtualenv/Lib/python3.6/site-packages/mako/filters.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | 1
|
2021-06-20T19:28:37.000Z
|
2021-06-20T19:28:37.000Z
|
# mako/filters.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import codecs
import re
from mako import compat
from mako.compat import codepoint2name
from mako.compat import name2codepoint
from mako.compat import quote_plus
from mako.compat import unquote_plus
xml_escapes = {
"&": "&",
">": ">",
"<": "<",
'"': """, # also " in html-only
"'": "'", # also ' in html-only
}
# XXX: " is valid in HTML and XML
# ' is not valid HTML, but is valid XML
def legacy_html_escape(s):
"""legacy HTML escape for non-unicode mode."""
s = s.replace("&", "&")
s = s.replace(">", ">")
s = s.replace("<", "<")
s = s.replace('"', """)
s = s.replace("'", "'")
return s
try:
import markupsafe
html_escape = markupsafe.escape
except ImportError:
html_escape = legacy_html_escape
def xml_escape(string):
return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
def url_escape(string):
# convert into a list of octets
string = string.encode("utf8")
return quote_plus(string)
def legacy_url_escape(string):
# convert into a list of octets
return quote_plus(string)
def url_unescape(string):
text = unquote_plus(string)
if not is_ascii_str(text):
text = text.decode("utf8")
return text
def trim(string):
return string.strip()
class Decode(object):
def __getattr__(self, key):
def decode(x):
if isinstance(x, compat.text_type):
return x
elif not isinstance(x, compat.binary_type):
return decode(str(x))
else:
return compat.text_type(x, encoding=key)
return decode
decode = Decode()
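# Usage sketch (illustrative; not part of the original module): attribute access
# on ``decode`` selects the codec name, so a template expression can write e.g.
#   decode.utf8(some_bytes)    # bytes decoded as UTF-8
#   decode.latin1(other_bytes) # bytes decoded as Latin-1
# Text passes through unchanged and non-string values are converted via str().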
_ASCII_re = re.compile(r"\A[\x00-\x7f]*\Z")
def is_ascii_str(text):
return isinstance(text, str) and _ASCII_re.match(text)
################################################################
class XMLEntityEscaper(object):
def __init__(self, codepoint2name, name2codepoint):
self.codepoint2entity = dict(
[
(c, compat.text_type("&%s;" % n))
for c, n in codepoint2name.items()
]
)
self.name2codepoint = name2codepoint
def escape_entities(self, text):
"""Replace characters with their character entity references.
Only characters corresponding to a named entity are replaced.
"""
return compat.text_type(text).translate(self.codepoint2entity)
def __escape(self, m):
codepoint = ord(m.group())
try:
return self.codepoint2entity[codepoint]
except (KeyError, IndexError):
return "&#x%X;" % codepoint
__escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
def escape(self, text):
"""Replace characters with their character references.
Replace characters by their named entity references.
Non-ASCII characters, if they do not have a named entity reference,
are replaced by numerical character references.
The return value is guaranteed to be ASCII.
"""
return self.__escapable.sub(
self.__escape, compat.text_type(text)
).encode("ascii")
# XXX: This regexp will not match all valid XML entity names__.
# (It punts on details involving involving CombiningChars and Extenders.)
#
# .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
__characterrefs = re.compile(
r"""& (?:
\#(\d+)
| \#x([\da-f]+)
| ( (?!\d) [:\w] [-.:\w]+ )
) ;""",
re.X | re.UNICODE,
)
def __unescape(self, m):
dval, hval, name = m.groups()
if dval:
codepoint = int(dval)
elif hval:
codepoint = int(hval, 16)
else:
codepoint = self.name2codepoint.get(name, 0xFFFD)
# U+FFFD = "REPLACEMENT CHARACTER"
        # On Python 3, chr() covers the full Unicode range, so codepoints below
        # and above 128 can be handled identically.
        return chr(codepoint)
def unescape(self, text):
"""Unescape character references.
All character references (both entity references and numerical
character references) are unescaped.
"""
return self.__characterrefs.sub(self.__unescape, text)
_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
html_entities_escape = _html_entities_escaper.escape_entities
html_entities_unescape = _html_entities_escaper.unescape
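# Usage sketch (illustrative): only characters that have a named entity are
# escaped, while unescape() also resolves numeric references, e.g.
#   html_entities_escape(u'café & co')            -> u'caf&eacute; &amp; co'
#   html_entities_unescape(u'caf&#233; &amp; co') -> u'café & co'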
def htmlentityreplace_errors(ex):
"""An encoding error handler.
This python codecs error handler replaces unencodable
characters with HTML entities, or, if no HTML entity exists for
the character, XML character references::
>>> u'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
'The cost was €12.'
"""
if isinstance(ex, UnicodeEncodeError):
# Handle encoding errors
bad_text = ex.object[ex.start : ex.end]
text = _html_entities_escaper.escape(bad_text)
return (compat.text_type(text), ex.end)
raise ex
codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
# TODO: options to make this dynamic per-compilation will be added in a later
# release
DEFAULT_ESCAPES = {
"x": "filters.xml_escape",
"h": "filters.html_escape",
"u": "filters.url_escape",
"trim": "filters.trim",
"entity": "filters.html_entities_escape",
"unicode": "unicode",
"decode": "decode",
"str": "str",
"n": "n",
}
if compat.py3k:
DEFAULT_ESCAPES.update({"unicode": "str"})
NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy()
NON_UNICODE_ESCAPES["h"] = "filters.legacy_html_escape"
NON_UNICODE_ESCAPES["u"] = "filters.legacy_url_escape"
| 28.554545
| 78
| 0.588029
|
ee64b66a64618f4f6aa5d4ea20300b7fa809c627
| 11,045
|
py
|
Python
|
salt/modules/mac_user.py
|
skrobul/salt
|
ef7fb71082cce7a9783e00b9c65062fefae09263
|
[
"Apache-2.0"
] | 2
|
2017-09-17T21:10:35.000Z
|
2019-08-26T03:00:12.000Z
|
salt/modules/mac_user.py
|
skrobul/salt
|
ef7fb71082cce7a9783e00b9c65062fefae09263
|
[
"Apache-2.0"
] | null | null | null |
salt/modules/mac_user.py
|
skrobul/salt
|
ef7fb71082cce7a9783e00b9c65062fefae09263
|
[
"Apache-2.0"
] | 3
|
2021-02-23T08:12:48.000Z
|
2021-02-23T08:13:13.000Z
|
# -*- coding: utf-8 -*-
'''
Manage users on Mac OS 10.7+
'''
# Import python libs
try:
import pwd
except ImportError:
pass
import logging
import random
import string
import time
# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt._compat import string_types
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'user'
def __virtual__():
if __grains__.get('kernel') != 'Darwin':
return False
return __virtualname__ if _osmajor() >= 10.7 else False
def _osmajor():
if '_osmajor' not in __context__:
__context__['_osmajor'] = float(
'.'.join(str(__grains__['osrelease']).split('.')[0:2])
)
return __context__['_osmajor']
def _flush_dscl_cache():
'''
Flush dscl cache
'''
__salt__['cmd.run']('dscacheutil -flushcache')
def _dscl(cmd, ctype='create'):
'''
Run a dscl -create command
'''
if _osmajor() < 10.8:
source, noderoot = '.', ''
else:
source, noderoot = 'localhost', '/Local/Default'
return __salt__['cmd.run_all'](
'dscl {0} -{1} {2}{3}'.format(source, ctype, noderoot, cmd),
output_loglevel='quiet' if ctype == 'passwd' else False
)
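# Illustrative note (added for clarity; not in the original module): on OS X
# 10.8 and later the helper above builds commands such as
#   dscl localhost -create /Local/Default/Users/foo UserShell '/bin/bash'
# while on 10.7 it targets the local node directly:
#   dscl . -create /Users/foo UserShell '/bin/bash'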
def _first_avail_uid():
uids = set(x.pw_uid for x in pwd.getpwall())
for idx in xrange(501, 2 ** 32):
if idx not in uids:
return idx
def add(name,
uid=None,
gid=None,
groups=None,
home=None,
shell=None,
fullname=None,
createhome=True,
**kwargs):
'''
Add a user to the minion
CLI Example:
.. code-block:: bash
salt '*' user.add name <uid> <gid> <groups> <home> <shell>
'''
if info(name):
raise CommandExecutionError('User {0!r} already exists'.format(name))
if salt.utils.contains_whitespace(name):
raise SaltInvocationError('Username cannot contain whitespace')
if uid is None:
uid = _first_avail_uid()
if gid is None:
gid = 20 # gid 20 == 'staff', the default group
if home is None:
home = '/Users/{0}'.format(name)
if shell is None:
shell = '/bin/bash'
if fullname is None:
fullname = ''
# TODO: do createhome as well
if not isinstance(uid, int):
raise SaltInvocationError('uid must be an integer')
if not isinstance(gid, int):
raise SaltInvocationError('gid must be an integer')
_dscl('/Users/{0} UniqueID {1!r}'.format(name, uid))
_dscl('/Users/{0} PrimaryGroupID {1!r}'.format(name, gid))
_dscl('/Users/{0} UserShell {1!r}'.format(name, shell))
_dscl('/Users/{0} NFSHomeDirectory {1!r}'.format(name, home))
_dscl('/Users/{0} RealName {1!r}'.format(name, fullname))
# Set random password, since without a password the account will not be
# available. TODO: add shadow module
randpass = ''.join(
random.choice(string.letters + string.digits) for x in xrange(20)
)
_dscl('/Users/{0} {1!r}'.format(name, randpass), ctype='passwd')
# dscl buffers changes, sleep before setting group membership
time.sleep(1)
if groups:
chgroups(name, groups)
return True
def delete(name, *args):
'''
Remove a user from the minion
CLI Example:
.. code-block:: bash
salt '*' user.delete foo
'''
### NOTE: *args isn't used here but needs to be included in this function
### for compatibility with the user.absent state
if salt.utils.contains_whitespace(name):
raise SaltInvocationError('Username cannot contain whitespace')
if not info(name):
return True
# Remove from any groups other than primary group. Needs to be done since
# group membership is managed separately from users and an entry for the
# user will persist even after the user is removed.
chgroups(name, ())
return _dscl('/Users/{0}'.format(name), ctype='delete')['retcode'] == 0
def getent(refresh=False):
'''
Return the list of all info for all users
CLI Example:
.. code-block:: bash
salt '*' user.getent
'''
if 'user.getent' in __context__ and not refresh:
return __context__['user.getent']
ret = []
for data in pwd.getpwall():
ret.append(_format_info(data))
__context__['user.getent'] = ret
return ret
def chuid(name, uid):
'''
Change the uid for a named user
CLI Example:
.. code-block:: bash
salt '*' user.chuid foo 4376
'''
if not isinstance(uid, int):
raise SaltInvocationError('uid must be an integer')
pre_info = info(name)
if not pre_info:
raise CommandExecutionError('User {0!r} does not exist'.format(name))
if uid == pre_info['uid']:
return True
_dscl(
'/Users/{0} UniqueID {1!r} {2!r}'.format(name, pre_info['uid'], uid),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
# matches desired value
time.sleep(1)
return info(name).get('uid') == uid
def chgid(name, gid):
'''
Change the default group of the user
CLI Example:
.. code-block:: bash
salt '*' user.chgid foo 4376
'''
if not isinstance(gid, int):
raise SaltInvocationError('gid must be an integer')
pre_info = info(name)
if not pre_info:
raise CommandExecutionError('User {0!r} does not exist'.format(name))
if gid == pre_info['gid']:
return True
_dscl(
'/Users/{0} PrimaryGroupID {1!r} {2!r}'.format(
name, pre_info['gid'], gid
),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
# matches desired value
time.sleep(1)
return info(name).get('gid') == gid
def chshell(name, shell):
'''
Change the default shell of the user
CLI Example:
.. code-block:: bash
salt '*' user.chshell foo /bin/zsh
'''
pre_info = info(name)
if not pre_info:
raise CommandExecutionError('User {0!r} does not exist'.format(name))
if shell == pre_info['shell']:
return True
_dscl(
'/Users/{0} UserShell {1!r} {2!r}'.format(
name, pre_info['shell'], shell
),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
# matches desired value
time.sleep(1)
return info(name).get('shell') == shell
def chhome(name, home):
'''
Change the home directory of the user
CLI Example:
.. code-block:: bash
salt '*' user.chhome foo /Users/foo
'''
pre_info = info(name)
if not pre_info:
raise CommandExecutionError('User {0!r} does not exist'.format(name))
if home == pre_info['home']:
return True
_dscl(
'/Users/{0} NFSHomeDirectory {1!r} {2!r}'.format(
name, pre_info['home'], home
),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
# matches desired value
time.sleep(1)
return info(name).get('home') == home
def chfullname(name, fullname):
'''
Change the user's Full Name
CLI Example:
.. code-block:: bash
salt '*' user.chfullname foo 'Foo Bar'
'''
fullname = str(fullname)
pre_info = info(name)
if not pre_info:
raise CommandExecutionError('User {0!r} does not exist'.format(name))
if fullname == pre_info['fullname']:
return True
_dscl(
'/Users/{0} RealName {1!r}'.format(name, fullname),
# use a "create" command, because a "change" command would fail if
# current fullname is an empty string. The "create" will just overwrite
# this field.
ctype='create'
)
# dscl buffers changes, sleep 1 second before checking if new value
# matches desired value
time.sleep(1)
return info(name).get('fullname') == fullname
def chgroups(name, groups, append=False):
'''
Change the groups to which the user belongs. Note that the user's primary
group does not have to be one of the groups passed, membership in the
user's primary group is automatically assumed.
groups
Groups to which the user should belong, can be passed either as a
python list or a comma-separated string
append
Instead of removing user from groups not included in the ``groups``
parameter, just add user to any groups for which they are not members
CLI Example:
.. code-block:: bash
salt '*' user.chgroups foo wheel,root
'''
### NOTE: **args isn't used here but needs to be included in this
### function for compatibility with the user.present state
uinfo = info(name)
if not uinfo:
raise CommandExecutionError('User {0!r} does not exist'.format(name))
if isinstance(groups, string_types):
groups = groups.split(',')
bad_groups = [x for x in groups if salt.utils.contains_whitespace(x)]
if bad_groups:
raise SaltInvocationError(
'Invalid group name(s): {0}'.format(', '.join(bad_groups))
)
ugrps = set(list_groups(name))
desired = set(str(x) for x in groups if bool(str(x)))
primary_group = __salt__['file.gid_to_group'](uinfo['gid'])
if primary_group:
desired.add(primary_group)
if ugrps == desired:
return True
# Add groups from which user is missing
for group in desired - ugrps:
_dscl(
'/Groups/{0} GroupMembership {1}'.format(group, name),
ctype='append'
)
if not append:
# Remove from extra groups
for group in ugrps - desired:
_dscl(
'/Groups/{0} GroupMembership {1}'.format(group, name),
ctype='delete'
)
time.sleep(1)
return set(list_groups(name)) == desired
def info(name):
'''
Return user information
CLI Example:
.. code-block:: bash
salt '*' user.info root
'''
try:
data = pwd.getpwnam(name)
except KeyError:
return {}
else:
return _format_info(data)
def _format_info(data):
'''
Return user information in a pretty way
'''
return {'gid': data.pw_gid,
'groups': list_groups(data.pw_name),
'home': data.pw_dir,
'name': data.pw_name,
'shell': data.pw_shell,
'uid': data.pw_uid,
'fullname': data.pw_gecos}
def list_groups(name):
'''
Return a list of groups the named user belongs to
CLI Example:
.. code-block:: bash
salt '*' user.list_groups foo
'''
groups = [group for group in salt.utils.get_group_list(name) if not group.startswith('_')]
return groups
def list_users():
'''
Return a list of all users
CLI Example:
.. code-block:: bash
salt '*' user.list_users
'''
return sorted([user.pw_name for user in pwd.getpwall()])
| 25.988235
| 94
| 0.60679
|
04dfd36d4165ab13c9964de81a3607e4d9aeb25e
| 925
|
py
|
Python
|
superai/data_program/router/router.py
|
mysuperai/superai-sdk
|
796c411c6ab69209600bf727e8fd08c20f4d67b1
|
[
"Apache-2.0"
] | 1
|
2020-12-03T18:18:16.000Z
|
2020-12-03T18:18:16.000Z
|
superai/data_program/router/router.py
|
mysuperai/superai-sdk
|
796c411c6ab69209600bf727e8fd08c20f4d67b1
|
[
"Apache-2.0"
] | 13
|
2021-02-22T18:27:58.000Z
|
2022-02-10T08:14:10.000Z
|
superai/data_program/router/router.py
|
mysuperai/superai-sdk
|
796c411c6ab69209600bf727e8fd08c20f4d67b1
|
[
"Apache-2.0"
] | 1
|
2021-04-27T12:38:47.000Z
|
2021-04-27T12:38:47.000Z
|
import os
from abc import ABC, abstractmethod
from typing import Dict, List
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from superai.data_program import DataProgram
from superai import Client
from ..workflow import Workflow
from superai.log import logger
log = logger.get_logger(__name__)
class Router(ABC):
def __init__(
self,
name: str = "router", # Can't get overriden for now
dataprorgam: "DataProgram" = None,
client: Client = None,
**kwargs,
):
"""
:param workflows:
:param metrics:
:param prefix:
:param name:
"""
if name != "router":
raise AttributeError("Router name is constraint to 'router'")
self.name = name
self.client = client
self.dataprogram = dataprorgam
self.kkwargs = kwargs
@abstractmethod
def subscribe_wf(self):
pass
| 21.511628
| 73
| 0.621622
|
725bf7ec9da863aa4067d487afd073d70a1892d5
| 306
|
py
|
Python
|
Hackerearth Set/TheBestInternetBrowser.py
|
Siddharth2016/PYTHON3_prog
|
9dfa258d87f5b00779d39d9de9a49c1c6cea06be
|
[
"MIT"
] | 2
|
2019-02-26T14:06:53.000Z
|
2019-02-27T17:13:01.000Z
|
Hackerearth Set/TheBestInternetBrowser.py
|
Siddharth2016/PYTHON3_prog
|
9dfa258d87f5b00779d39d9de9a49c1c6cea06be
|
[
"MIT"
] | null | null | null |
Hackerearth Set/TheBestInternetBrowser.py
|
Siddharth2016/PYTHON3_prog
|
9dfa258d87f5b00779d39d9de9a49c1c6cea06be
|
[
"MIT"
] | 2
|
2017-12-26T07:59:57.000Z
|
2018-06-24T03:35:05.000Z
|
# THE BEST INTERNET BROWSER
for _ in range(int(input())):
st = str(input())
countvowel = st.count('a') + st.count('e') + st.count('i') + st.count('o') + st.count('u') - 1
totallen = len(st)
urllen = totallen - 4 - countvowel
res = str(urllen)+"/"+str(totallen)
print(res)
| 30.6
| 99
| 0.558824
|
14f9932f3fdd47ec6a18d374a4c43f9dec2e5bb0
| 3,628
|
py
|
Python
|
datasets/cifar10/cifar10.py
|
TheVinhLuong102/HuggingFace-DataSets
|
e42e15f92c1033e5df9348f6b6c0d441489aa187
|
[
"Apache-2.0"
] | 10,608
|
2020-09-10T15:47:50.000Z
|
2022-03-31T22:51:47.000Z
|
datasets/cifar10/cifar10.py
|
realChainLife/datasets
|
98261e8b0b7be4dbaaa71ae188b950f7fbe51bbd
|
[
"Apache-2.0"
] | 2,396
|
2020-09-10T14:55:31.000Z
|
2022-03-31T19:41:04.000Z
|
datasets/cifar10/cifar10.py
|
realChainLife/datasets
|
98261e8b0b7be4dbaaa71ae188b950f7fbe51bbd
|
[
"Apache-2.0"
] | 1,530
|
2020-09-10T21:43:10.000Z
|
2022-03-31T01:59:12.000Z
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""CIFAR-10 Data Set"""
import pickle
import numpy as np
import datasets
from datasets.tasks import ImageClassification
_CITATION = """\
@TECHREPORT{Krizhevsky09learningmultiple,
author = {Alex Krizhevsky},
title = {Learning multiple layers of features from tiny images},
institution = {},
year = {2009}
}
"""
_DESCRIPTION = """\
The CIFAR-10 dataset consists of 60000 32x32 colour images in 10 classes, with 6000 images
per class. There are 50000 training images and 10000 test images.
"""
_DATA_URL = "https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz"
_NAMES = [
"airplane",
"automobile",
"bird",
"cat",
"deer",
"dog",
"frog",
"horse",
"ship",
"truck",
]
class Cifar10(datasets.GeneratorBasedBuilder):
"""CIFAR-10 Data Set"""
BUILDER_CONFIGS = [
datasets.BuilderConfig(
name="plain_text",
version=datasets.Version("1.0.0", ""),
description="Plain text import of CIFAR-10 Data Set",
)
]
def _info(self):
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features(
{
"img": datasets.Image(),
"label": datasets.features.ClassLabel(names=_NAMES),
}
),
supervised_keys=("img", "label"),
homepage="https://www.cs.toronto.edu/~kriz/cifar.html",
citation=_CITATION,
task_templates=ImageClassification(image_column="img", label_column="label", labels=_NAMES),
)
def _split_generators(self, dl_manager):
archive = dl_manager.download(_DATA_URL)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN, gen_kwargs={"files": dl_manager.iter_archive(archive), "split": "train"}
),
datasets.SplitGenerator(
name=datasets.Split.TEST, gen_kwargs={"files": dl_manager.iter_archive(archive), "split": "test"}
),
]
def _generate_examples(self, files, split):
"""This function returns the examples in the raw (text) form."""
if split == "train":
batches = ["data_batch_1", "data_batch_2", "data_batch_3", "data_batch_4", "data_batch_5"]
if split == "test":
batches = ["test_batch"]
batches = [f"cifar-10-batches-py/{filename}" for filename in batches]
for path, fo in files:
if path in batches:
batch = pickle.load(fo, encoding="bytes")
labels = batch[b"labels"]
images = batch[b"data"]
for idx, _ in enumerate(images):
img_reshaped = np.transpose(np.reshape(images[idx], (3, 32, 32)), (1, 2, 0))
yield f"{path}_{idx}", {
"img": img_reshaped,
"label": labels[idx],
}
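# Added usage sketch (not part of the original script), assuming the `datasets`
# package is installed and the CIFAR-10 archive can be downloaded:
if __name__ == "__main__":
    from datasets import load_dataset
    train = load_dataset("cifar10", split="train")
    print(train[0]["label"], train[0]["img"])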
| 29.737705
| 115
| 0.600882
|
49b2a2e0885d0398c37b4600126c15e7e9ec6286
| 113
|
py
|
Python
|
Unit1/StarDraw.py
|
yuhao1998/PythonStudy
|
fa678f0352673a6934b8f5b1689777af531f3675
|
[
"Apache-2.0"
] | null | null | null |
Unit1/StarDraw.py
|
yuhao1998/PythonStudy
|
fa678f0352673a6934b8f5b1689777af531f3675
|
[
"Apache-2.0"
] | null | null | null |
Unit1/StarDraw.py
|
yuhao1998/PythonStudy
|
fa678f0352673a6934b8f5b1689777af531f3675
|
[
"Apache-2.0"
] | null | null | null |
from turtle import *
color('red','red')
begin_fill()
for i in range(5):
fd(200)
rt(144)
end_fill()
done()
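# Note (added): turning 144 degrees after each 200-pixel edge yields a five-pointed
# star, since 5 * 144 = 720 degrees, i.e. exactly two full turns of the turtle.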
| 14.125
| 20
| 0.628319
|
5d6b00b7e4cec8a3028f1eacf3e2c7fa79452a77
| 69,527
|
py
|
Python
|
tests/common.py
|
xbcnn/emscripten
|
4cf36b6df367326e4d54ab6004466efe7ed3f265
|
[
"MIT"
] | null | null | null |
tests/common.py
|
xbcnn/emscripten
|
4cf36b6df367326e4d54ab6004466efe7ed3f265
|
[
"MIT"
] | null | null | null |
tests/common.py
|
xbcnn/emscripten
|
4cf36b6df367326e4d54ab6004466efe7ed3f265
|
[
"MIT"
] | null | null | null |
# Copyright 2021 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
from enum import Enum
from functools import wraps
from pathlib import Path
from subprocess import PIPE, STDOUT
from urllib.parse import unquote, unquote_plus
from http.server import HTTPServer, SimpleHTTPRequestHandler
import contextlib
import difflib
import hashlib
import logging
import multiprocessing
import os
import re
import shlex
import shutil
import stat
import string
import subprocess
import sys
import tempfile
import time
import webbrowser
import unittest
import clang_native
import jsrun
from tools.shared import TEMP_DIR, EMCC, EMXX, DEBUG, EMCONFIGURE, EMCMAKE
from tools.shared import EMSCRIPTEN_TEMP_DIR
from tools.shared import get_canonical_temp_dir, try_delete, path_from_root
from tools.utils import MACOS, WINDOWS, read_file, read_binary, write_file, write_binary, exit_with_error
from tools import shared, line_endings, building, config
logger = logging.getLogger('common')
# The user can specify an environment variable EMTEST_BROWSER to force the browser
# test suite to run using a browser command line other than the default system
# browser.
# There are two special values that can be used here if running in an actual
# browser is not desired:
# EMTEST_BROWSER=0 : This will disable the actual running of the test and simply
# verify that it compiles and links.
# EMTEST_BROWSER=node : This will attempt to run the browser test under node.
# For most browser tests this does not work, but it can
# be useful for running pthread tests under node.
EMTEST_BROWSER = None
EMTEST_DETECT_TEMPFILE_LEAKS = None
EMTEST_SAVE_DIR = None
# generally js engines are equivalent, testing 1 is enough. set this
# to force testing on all js engines, good to find js engine bugs
EMTEST_ALL_ENGINES = None
EMTEST_SKIP_SLOW = None
EMTEST_LACKS_NATIVE_CLANG = None
EMTEST_VERBOSE = None
EMTEST_REBASELINE = None
EMTEST_FORCE64 = None
# Verbosity level control for subprocess calls to configure + make.
# 0: disabled.
# 1: Log stderr of configure/make.
# 2: Log stdout and stderr configure/make. Print out subprocess commands that were executed.
# 3: Log stdout and stderr, and pass VERBOSE=1 to CMake/configure/make steps.
EMTEST_BUILD_VERBOSE = int(os.getenv('EMTEST_BUILD_VERBOSE', '0'))
if 'EM_BUILD_VERBOSE' in os.environ:
exit_with_error('EM_BUILD_VERBOSE has been renamed to EMTEST_BUILD_VERBOSE')
# Special value for passing to assert_returncode which means we expect the program
# to fail with a non-zero return code, but we don't care specifically which one.
NON_ZERO = -1
TEST_ROOT = path_from_root('tests')
WEBIDL_BINDER = shared.bat_suffix(path_from_root('tools/webidl_binder'))
EMBUILDER = shared.bat_suffix(path_from_root('embuilder'))
EMMAKE = shared.bat_suffix(path_from_root('emmake'))
WASM_DIS = Path(building.get_binaryen_bin(), 'wasm-dis')
def delete_contents(pathname):
for entry in os.listdir(pathname):
try_delete(os.path.join(pathname, entry))
# TODO(sbc): Should we make try_delete have a stronger guarantee?
assert not os.path.exists(os.path.join(pathname, entry))
def test_file(*path_components):
"""Construct a path relative to the emscripten "tests" directory."""
return str(Path(TEST_ROOT, *path_components))
# checks if browser testing is enabled
def has_browser():
return EMTEST_BROWSER != '0'
def compiler_for(filename, force_c=False):
if shared.suffix(filename) in ('.cc', '.cxx', '.cpp') and not force_c:
return EMXX
else:
return EMCC
# Generic decorator that calls a function named 'condition' on the test class and
# skips the test if that function returns true
def skip_if(func, condition, explanation='', negate=False):
assert callable(func)
explanation_str = ' : %s' % explanation if explanation else ''
@wraps(func)
def decorated(self, *args, **kwargs):
choice = self.__getattribute__(condition)()
if negate:
choice = not choice
if choice:
self.skipTest(condition + explanation_str)
func(self, *args, **kwargs)
return decorated
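# Added usage sketch (not part of the original file): skip_if is typically wrapped
# into a named decorator; here it skips a test unless the suite reports wasm mode,
# using the is_wasm() predicate defined on RunnerCore below.
#
#   def requires_wasm(note=''):
#     def decorator(f):
#       return skip_if(f, 'is_wasm', note, negate=True)
#     return decorator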
def needs_dylink(func):
assert callable(func)
@wraps(func)
def decorated(self, *args, **kwargs):
self.check_dylink()
return func(self, *args, **kwargs)
return decorated
def is_slow_test(func):
assert callable(func)
@wraps(func)
def decorated(self, *args, **kwargs):
if EMTEST_SKIP_SLOW:
return self.skipTest('skipping slow tests')
return func(self, *args, **kwargs)
return decorated
def disabled(note=''):
assert not callable(note)
return unittest.skip(note)
def no_mac(note=''):
assert not callable(note)
if MACOS:
return unittest.skip(note)
return lambda f: f
def no_windows(note=''):
assert not callable(note)
if WINDOWS:
return unittest.skip(note)
return lambda f: f
def requires_native_clang(func):
assert callable(func)
@wraps(func)
def decorated(self, *args, **kwargs):
if EMTEST_LACKS_NATIVE_CLANG:
return self.skipTest('native clang tests are disabled')
return func(self, *args, **kwargs)
return decorated
def require_node(func):
assert callable(func)
def decorated(self, *args, **kwargs):
self.require_node()
return func(self, *args, **kwargs)
return decorated
def require_v8(func):
assert callable(func)
def decorated(self, *args, **kwargs):
self.require_v8()
return func(self, *args, **kwargs)
return decorated
def node_pthreads(f):
@wraps(f)
def decorated(self, *args, **kwargs):
self.setup_node_pthreads()
f(self, *args, **kwargs)
return decorated
@contextlib.contextmanager
def env_modify(updates):
"""A context manager that updates os.environ."""
# This could also be done with mock.patch.dict() but taking a dependency
# on the mock library is probably not worth the benefit.
old_env = os.environ.copy()
print("env_modify: " + str(updates))
# Setting a value to None means clearing the environment variable
clears = [key for key, value in updates.items() if value is None]
updates = {key: value for key, value in updates.items() if value is not None}
os.environ.update(updates)
for key in clears:
if key in os.environ:
del os.environ[key]
try:
yield
finally:
os.environ.clear()
os.environ.update(old_env)
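# Added usage sketch (not part of the original file): override one variable and clear
# another for the duration of a block; the previous environment is restored on exit.
#
#   with env_modify({'EMCC_DEBUG': '1', 'EMCC_CORES': None}):
#       run_build_under_test()   # hypothetical call, illustrative only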
# Decorator version of env_modify
def with_env_modify(updates):
assert not callable(updates)
def decorated(f):
def modified(self, *args, **kwargs):
with env_modify(updates):
return f(self, *args, **kwargs)
return modified
return decorated
def also_with_minimal_runtime(f):
assert callable(f)
def metafunc(self, with_minimal_runtime):
assert self.get_setting('MINIMAL_RUNTIME') is None
if with_minimal_runtime:
self.set_setting('MINIMAL_RUNTIME', 1)
f(self)
metafunc._parameterize = {'': (False,),
'minimal_runtime': (True,)}
return metafunc
def also_with_wasm_bigint(f):
assert callable(f)
def metafunc(self, with_bigint):
if with_bigint:
if not self.is_wasm():
self.skipTest('wasm2js does not support WASM_BIGINT')
if self.get_setting('WASM_BIGINT') is not None:
self.skipTest('redundant in bigint test config')
self.set_setting('WASM_BIGINT')
self.require_node()
self.node_args.append('--experimental-wasm-bigint')
f(self)
else:
f(self)
metafunc._parameterize = {'': (False,),
'bigint': (True,)}
return metafunc
def ensure_dir(dirname):
dirname = Path(dirname)
dirname.mkdir(parents=True, exist_ok=True)
def limit_size(string, maxbytes=800000 * 20, maxlines=100000, max_line=5000):
lines = string.splitlines()
for i, line in enumerate(lines):
if len(line) > max_line:
lines[i] = line[:max_line] + '[..]'
if len(lines) > maxlines:
lines = lines[0:maxlines // 2] + ['[..]'] + lines[-maxlines // 2:]
string = '\n'.join(lines) + '\n'
if len(string) > maxbytes:
string = string[0:maxbytes // 2] + '\n[..]\n' + string[-maxbytes // 2:]
return string
def create_file(name, contents, binary=False):
name = Path(name)
assert not name.is_absolute()
if binary:
name.write_bytes(contents)
else:
name.write_text(contents)
def make_executable(name):
Path(name).chmod(stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
def parameterized(parameters):
"""
Mark a test as parameterized.
Usage:
@parameterized({
'subtest1': (1, 2, 3),
'subtest2': (4, 5, 6),
})
def test_something(self, a, b, c):
... # actual test body
This is equivalent to defining two tests:
def test_something_subtest1(self):
# runs test_something(1, 2, 3)
def test_something_subtest2(self):
# runs test_something(4, 5, 6)
"""
def decorator(func):
func._parameterize = parameters
return func
return decorator
class RunnerMeta(type):
@classmethod
def make_test(mcs, name, func, suffix, args):
"""
This is a helper function to create new test functions for each parameterized form.
:param name: the original name of the function
:param func: the original function that we are parameterizing
:param suffix: the suffix to append to the name of the function for this parameterization
:param args: the positional arguments to pass to the original function for this parameterization
:returns: a tuple of (new_function_name, new_function_object)
"""
# Create the new test function. It calls the original function with the specified args.
# We use @functools.wraps to copy over all the function attributes.
@wraps(func)
def resulting_test(self):
return func(self, *args)
# Add suffix to the function name so that it displays correctly.
if suffix:
resulting_test.__name__ = f'{name}_{suffix}'
else:
resulting_test.__name__ = name
# On python 3, functions have __qualname__ as well. This is a full dot-separated path to the
# function. We add the suffix to it as well.
resulting_test.__qualname__ = f'{func.__qualname__}_{suffix}'
return resulting_test.__name__, resulting_test
def __new__(mcs, name, bases, attrs):
# This metaclass expands parameterized methods from `attrs` into separate ones in `new_attrs`.
new_attrs = {}
for attr_name, value in attrs.items():
# Check if a member of the new class has _parameterize, the tag inserted by @parameterized.
if hasattr(value, '_parameterize'):
# If it does, we extract the parameterization information, build new test functions.
for suffix, args in value._parameterize.items():
new_name, func = mcs.make_test(attr_name, value, suffix, args)
assert new_name not in new_attrs, 'Duplicate attribute name generated when parameterizing %s' % attr_name
new_attrs[new_name] = func
else:
# If not, we just copy it over to new_attrs verbatim.
assert attr_name not in new_attrs, '%s collided with an attribute from parameterization' % attr_name
new_attrs[attr_name] = value
# We invoke type, the default metaclass, to actually create the new class, with new_attrs.
return type.__new__(mcs, name, bases, new_attrs)
class RunnerCore(unittest.TestCase, metaclass=RunnerMeta):
# default temporary directory settings. set_temp_dir may be called later to
# override these
temp_dir = TEMP_DIR
canonical_temp_dir = get_canonical_temp_dir(TEMP_DIR)
# This avoids cluttering the test runner output, which is stderr too, with compiler warnings etc.
# Change this to None to get stderr reporting, for debugging purposes
stderr_redirect = STDOUT
def is_wasm(self):
return self.get_setting('WASM') != 0
def check_dylink(self):
if self.get_setting('ALLOW_MEMORY_GROWTH') == 1 and not self.is_wasm():
self.skipTest('no dynamic linking with memory growth (without wasm)')
if not self.is_wasm():
self.skipTest('no dynamic linking support in wasm2js yet')
if '-fsanitize=address' in self.emcc_args:
self.skipTest('no dynamic linking support in ASan yet')
if '-fsanitize=leak' in self.emcc_args:
self.skipTest('no dynamic linking support in LSan yet')
if '-fsanitize=undefined' in self.emcc_args:
self.skipTest('no dynamic linking support in UBSan yet')
def require_v8(self):
if not config.V8_ENGINE or config.V8_ENGINE not in config.JS_ENGINES:
if 'EMTEST_SKIP_V8' in os.environ:
self.skipTest('test requires v8 and EMTEST_SKIP_V8 is set')
else:
self.fail('d8 required to run this test. Use EMTEST_SKIP_V8 to skip')
self.js_engines = [config.V8_ENGINE]
self.emcc_args.append('-sENVIRONMENT=shell')
def require_node(self):
if not config.NODE_JS or config.NODE_JS not in config.JS_ENGINES:
if 'EMTEST_SKIP_NODE' in os.environ:
self.skipTest('test requires node and EMTEST_SKIP_NODE is set')
else:
self.fail('node required to run this test. Use EMTEST_SKIP_NODE to skip')
if self.get_setting('MEMORY64') == 1:
self.skipTest("MEMORY64=1 tests don't yet run under node")
self.js_engines = [config.NODE_JS]
def setup_node_pthreads(self):
self.require_node()
self.set_setting('USE_PTHREADS')
self.emcc_args += ['-Wno-pthreads-mem-growth']
if self.get_setting('MINIMAL_RUNTIME'):
self.skipTest('node pthreads not yet supported with MINIMAL_RUNTIME')
self.js_engines = [config.NODE_JS]
self.node_args += ['--experimental-wasm-threads', '--experimental-wasm-bulk-memory']
def uses_memory_init_file(self):
if self.get_setting('SIDE_MODULE') or (self.is_wasm() and not self.get_setting('WASM2JS')):
return False
elif '--memory-init-file' in self.emcc_args:
return int(self.emcc_args[self.emcc_args.index('--memory-init-file') + 1])
else:
# side modules handle memory differently; binaryen puts the memory in the wasm module
opt_supports = any(opt in self.emcc_args for opt in ('-O2', '-O3', '-Os', '-Oz'))
return opt_supports
def set_temp_dir(self, temp_dir):
self.temp_dir = temp_dir
self.canonical_temp_dir = get_canonical_temp_dir(self.temp_dir)
# Explicitly set dedicated temporary directory for parallel tests
os.environ['EMCC_TEMP_DIR'] = self.temp_dir
@classmethod
def setUpClass(cls):
super().setUpClass()
print('(checking sanity from test runner)') # do this after we set env stuff
shared.check_sanity(force=True)
def setUp(self):
super().setUp()
self.settings_mods = {}
self.emcc_args = ['-Werror', '-Wno-limited-postlink-optimizations']
# We want to be strict about closure warnings in our test code.
# TODO(sbc): Remove this if we make it the default for `-Werror`:
# https://github.com/emscripten-core/emscripten/issues/16205):
self.ldflags = ['-sCLOSURE_WARNINGS=error']
self.node_args = [
# Increase the stack trace limit to maximise the usefulness of test failure reports
'--stack-trace-limit=50',
# Opt in to node v15 default behaviour:
# https://nodejs.org/api/cli.html#cli_unhandled_rejections_mode
'--unhandled-rejections=throw',
# Include a backtrace for all uncaught exceptions (not just Error).
'--trace-uncaught',
]
self.v8_args = []
self.env = {}
self.temp_files_before_run = []
self.uses_es6 = False
self.js_engines = config.JS_ENGINES.copy()
self.wasm_engines = config.WASM_ENGINES.copy()
self.banned_js_engines = []
self.use_all_engines = EMTEST_ALL_ENGINES
if EMTEST_DETECT_TEMPFILE_LEAKS:
for root, dirnames, filenames in os.walk(self.temp_dir):
for dirname in dirnames:
self.temp_files_before_run.append(os.path.normpath(os.path.join(root, dirname)))
for filename in filenames:
self.temp_files_before_run.append(os.path.normpath(os.path.join(root, filename)))
if EMTEST_SAVE_DIR:
self.working_dir = os.path.join(self.temp_dir, 'emscripten_test')
if os.path.exists(self.working_dir):
if EMTEST_SAVE_DIR == 2:
print('Not clearing existing test directory')
else:
print('Clearing existing test directory')
# Even when --save-dir is used we still try to start with an empty directory as many tests
# expect this. --no-clean can be used to keep the old contents for the new test
# run. This can be useful when iterating on a given test with extra files you want to keep
# around in the output directory.
delete_contents(self.working_dir)
else:
print('Creating new test output directory')
ensure_dir(self.working_dir)
else:
self.working_dir = tempfile.mkdtemp(prefix='emscripten_test_' + self.__class__.__name__ + '_', dir=self.temp_dir)
os.chdir(self.working_dir)
if not EMTEST_SAVE_DIR:
self.has_prev_ll = False
for temp_file in os.listdir(TEMP_DIR):
if temp_file.endswith('.ll'):
self.has_prev_ll = True
def tearDown(self):
if not EMTEST_SAVE_DIR:
# rmtree() fails on Windows if the current working directory is inside the tree.
os.chdir(os.path.dirname(self.get_dir()))
try_delete(self.get_dir())
if EMTEST_DETECT_TEMPFILE_LEAKS and not DEBUG:
temp_files_after_run = []
for root, dirnames, filenames in os.walk(self.temp_dir):
for dirname in dirnames:
temp_files_after_run.append(os.path.normpath(os.path.join(root, dirname)))
for filename in filenames:
temp_files_after_run.append(os.path.normpath(os.path.join(root, filename)))
# Our leak detection will pick up *any* new temp files in the temp dir.
# They may not be due to us, but e.g. the browser when running browser
# tests. Until we figure out a proper solution, ignore some temp file
# names that we see on our CI infrastructure.
ignorable_file_prefixes = [
'/tmp/tmpaddon',
'/tmp/circleci-no-output-timeout',
'/tmp/wasmer'
]
left_over_files = set(temp_files_after_run) - set(self.temp_files_before_run)
left_over_files = [f for f in left_over_files if not any([f.startswith(prefix) for prefix in ignorable_file_prefixes])]
if len(left_over_files):
print('ERROR: After running test, there are ' + str(len(left_over_files)) + ' new temporary files/directories left behind:', file=sys.stderr)
for f in left_over_files:
print('leaked file: ' + f, file=sys.stderr)
self.fail('Test leaked ' + str(len(left_over_files)) + ' temporary files!')
def get_setting(self, key, default=None):
return self.settings_mods.get(key, default)
def set_setting(self, key, value=1):
if value is None:
self.clear_setting(key)
if type(value) == bool:
value = int(value)
self.settings_mods[key] = value
def has_changed_setting(self, key):
return key in self.settings_mods
def clear_setting(self, key):
self.settings_mods.pop(key, None)
def serialize_settings(self):
ret = []
for key, value in self.settings_mods.items():
if value == 1:
ret.append(f'-s{key}')
elif type(value) == list:
ret.append(f'-s{key}={",".join(value)}')
else:
ret.append(f'-s{key}={value}')
return ret
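# Illustrative note (added): with settings_mods {'WASM_BIGINT': 1,
# 'EXPORTED_FUNCTIONS': ['_main', '_foo']} this returns
# ['-sWASM_BIGINT', '-sEXPORTED_FUNCTIONS=_main,_foo'].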
def get_dir(self):
return self.working_dir
def in_dir(self, *pathelems):
return os.path.join(self.get_dir(), *pathelems)
def add_pre_run(self, code):
create_file('prerun.js', 'Module.preRun = function() { %s }' % code)
self.emcc_args += ['--pre-js', 'prerun.js']
def add_post_run(self, code):
create_file('postrun.js', 'Module.postRun = function() { %s }' % code)
self.emcc_args += ['--pre-js', 'postrun.js']
def add_on_exit(self, code):
create_file('onexit.js', 'Module.onExit = function() { %s }' % code)
self.emcc_args += ['--pre-js', 'onexit.js']
# returns the full list of arguments to pass to emcc
# param @main_file whether this is the main file of the test. some arguments
# (like --pre-js) do not need to be passed when building
# libraries, for example
def get_emcc_args(self, main_file=False, ldflags=True):
args = self.serialize_settings() + self.emcc_args
if ldflags:
args += self.ldflags
if not main_file:
for i, arg in enumerate(args):
if arg in ('--pre-js', '--post-js'):
args[i] = None
args[i + 1] = None
args = [arg for arg in args if arg is not None]
return args
def verify_es5(self, filename):
es_check = shared.get_npm_cmd('es-check')
# use --quiet once it's available
# See: https://github.com/dollarshaveclub/es-check/pull/126/
es_check_env = os.environ.copy()
es_check_env['PATH'] = os.path.dirname(config.NODE_JS[0]) + os.pathsep + es_check_env['PATH']
try:
# es-check prints the details of the errors to stdout, but it also prints
# stuff in the case there are no errors:
# ES-Check: there were no ES version matching errors!
# pipe stdout and stderr so that we can choose if/when to print this
# output and avoid spamming stdout when tests are successful.
shared.run_process(es_check + ['es5', os.path.abspath(filename)], stdout=PIPE, stderr=STDOUT, env=es_check_env)
except subprocess.CalledProcessError as e:
print(e.stdout)
self.fail('es-check failed to verify ES5 output compliance')
# Build JavaScript code from source code
def build(self, filename, libraries=[], includes=[], force_c=False, js_outfile=True, emcc_args=[], output_basename=None):
suffix = '.js' if js_outfile else '.wasm'
compiler = [compiler_for(filename, force_c)]
if compiler[0] == EMCC:
# TODO(https://github.com/emscripten-core/emscripten/issues/11121)
# For historical reasons emcc compiles and links as C++ by default.
# However we want to run our tests in a more strict manner. We can
# remove this if the issue above is ever fixed.
compiler.append('-sNO_DEFAULT_TO_CXX')
if force_c:
compiler.append('-xc')
if output_basename:
output = output_basename + suffix
else:
basename = os.path.basename(filename)
output = shared.unsuffixed(basename) + suffix
cmd = compiler + [filename, '-o', output] + self.get_emcc_args(main_file=True) + emcc_args + libraries
if shared.suffix(filename) not in ('.i', '.ii'):
# Add the location of the test file to include path.
cmd += ['-I.']
cmd += ['-I' + str(include) for include in includes]
self.run_process(cmd, stderr=self.stderr_redirect if not DEBUG else None)
self.assertExists(output)
if js_outfile and self.uses_memory_init_file():
src = read_file(output)
# side memory init file, or an empty one in the js
assert ('/* memory initializer */' not in src) or ('/* memory initializer */ allocate([]' in src)
return output
def get_func(self, src, name):
start = src.index('function ' + name + '(')
t = start
n = 0
while True:
if src[t] == '{':
n += 1
elif src[t] == '}':
n -= 1
if n == 0:
return src[start:t + 1]
t += 1
assert t < len(src)
def count_funcs(self, javascript_file):
num_funcs = 0
start_tok = "// EMSCRIPTEN_START_FUNCS"
end_tok = "// EMSCRIPTEN_END_FUNCS"
start_off = 0
end_off = 0
js = read_file(javascript_file)
blob = "".join(js.splitlines())
start_off = blob.find(start_tok) + len(start_tok)
end_off = blob.find(end_tok)
asm_chunk = blob[start_off:end_off]
num_funcs = asm_chunk.count('function ')
return num_funcs
def count_wasm_contents(self, wasm_binary, what):
out = self.run_process([os.path.join(building.get_binaryen_bin(), 'wasm-opt'), wasm_binary, '--metrics'], stdout=PIPE).stdout
# output is something like
# [?] : 125
for line in out.splitlines():
if '[' + what + ']' in line:
ret = line.split(':')[1].strip()
return int(ret)
self.fail('Failed to find [%s] in wasm-opt output' % what)
def get_wasm_text(self, wasm_binary):
return self.run_process([WASM_DIS, wasm_binary], stdout=PIPE).stdout
def is_exported_in_wasm(self, name, wasm):
wat = self.get_wasm_text(wasm)
return ('(export "%s"' % name) in wat
def measure_wasm_code_lines(self, wasm):
wat_lines = self.get_wasm_text(wasm).splitlines()
non_data_lines = [line for line in wat_lines if '(data ' not in line]
return len(non_data_lines)
def run_js(self, filename, engine=None, args=[],
output_nicerizer=None,
assert_returncode=0,
interleaved_output=True):
# use files, as PIPE can get too full and hang us
stdout_file = self.in_dir('stdout')
stderr_file = None
if interleaved_output:
stderr = STDOUT
else:
stderr_file = self.in_dir('stderr')
stderr = open(stderr_file, 'w')
error = None
timeout_error = None
if not engine:
engine = self.js_engines[0]
if engine == config.NODE_JS:
engine = engine + self.node_args
if engine == config.V8_ENGINE:
engine = engine + self.v8_args
try:
jsrun.run_js(filename, engine, args,
stdout=open(stdout_file, 'w'),
stderr=stderr,
assert_returncode=assert_returncode)
except subprocess.TimeoutExpired as e:
timeout_error = e
except subprocess.CalledProcessError as e:
error = e
# Make sure that we produced proper line endings to the .js file we are about to run.
if not filename.endswith('.wasm'):
self.assertEqual(line_endings.check_line_endings(filename), 0)
ret = read_file(stdout_file)
if not interleaved_output:
ret += read_file(stderr_file)
if output_nicerizer:
ret = output_nicerizer(ret)
if error or timeout_error or EMTEST_VERBOSE:
ret = limit_size(ret)
print('-- begin program output --')
print(read_file(stdout_file), end='')
print('-- end program output --')
if not interleaved_output:
print('-- begin program stderr --')
print(read_file(stderr_file), end='')
print('-- end program stderr --')
if timeout_error:
raise timeout_error
if error:
if assert_returncode == NON_ZERO:
self.fail('JS subprocess unexpectedly succeeded (%s): Output:\n%s' % (error.cmd, ret))
else:
self.fail('JS subprocess failed (%s): %s. Output:\n%s' % (error.cmd, error.returncode, ret))
# We should pass all strict mode checks
self.assertNotContained('strict warning:', ret)
return ret
def assertExists(self, filename, msg=None):
if not msg:
msg = 'Expected file not found: ' + filename
self.assertTrue(os.path.exists(filename), msg)
def assertNotExists(self, filename, msg=None):
if not msg:
msg = 'Unexpected file exists: ' + filename
self.assertFalse(os.path.exists(filename), msg)
# Tests that the given two paths are identical, modulo path delimiters. E.g. "C:/foo" is equal to "C:\foo".
def assertPathsIdentical(self, path1, path2):
path1 = path1.replace('\\', '/')
path2 = path2.replace('\\', '/')
return self.assertIdentical(path1, path2)
# Tests that the given two multiline text content are identical, modulo line
# ending differences (\r\n on Windows, \n on Unix).
def assertTextDataIdentical(self, text1, text2, msg=None,
fromfile='expected', tofile='actual'):
text1 = text1.replace('\r\n', '\n')
text2 = text2.replace('\r\n', '\n')
return self.assertIdentical(text1, text2, msg, fromfile, tofile)
def assertIdentical(self, values, y, msg=None,
fromfile='expected', tofile='actual'):
if type(values) not in (list, tuple):
values = [values]
for x in values:
if x == y:
return # success
diff_lines = difflib.unified_diff(x.splitlines(), y.splitlines(),
fromfile=fromfile, tofile=tofile)
diff = ''.join([a.rstrip() + '\n' for a in diff_lines])
if EMTEST_VERBOSE:
print("Expected to have '%s' == '%s'" % (limit_size(values[0]), limit_size(y)))
fail_message = 'Unexpected difference:\n' + limit_size(diff)
if not EMTEST_VERBOSE:
fail_message += '\nFor full output run with --verbose.'
if msg:
fail_message += '\n' + msg
self.fail(fail_message)
def assertTextDataContained(self, text1, text2):
text1 = text1.replace('\r\n', '\n')
text2 = text2.replace('\r\n', '\n')
return self.assertContained(text1, text2)
def assertFileContents(self, filename, contents):
if EMTEST_VERBOSE:
print(f'Comparing results contents of file: {filename}')
contents = contents.replace('\r', '')
if EMTEST_REBASELINE:
with open(filename, 'w') as f:
f.write(contents)
return
if not os.path.exists(filename):
self.fail('Test expectation file not found: ' + filename + '.\n' +
'Run with --rebaseline to generate.')
expected_content = read_file(filename)
message = "Run with --rebaseline to automatically update expectations"
self.assertTextDataIdentical(expected_content, contents, message,
filename, filename + '.new')
def assertContained(self, values, string, additional_info=''):
if type(values) not in [list, tuple]:
values = [values]
if callable(string):
string = string()
if not any(v in string for v in values):
diff = difflib.unified_diff(values[0].split('\n'), string.split('\n'), fromfile='expected', tofile='actual')
diff = ''.join(a.rstrip() + '\n' for a in diff)
self.fail("Expected to find '%s' in '%s', diff:\n\n%s\n%s" % (
limit_size(values[0]), limit_size(string), limit_size(diff),
additional_info
))
def assertNotContained(self, value, string):
if callable(value):
value = value() # lazy loading
if callable(string):
string = string()
if value in string:
self.fail("Expected to NOT find '%s' in '%s'" % (limit_size(value), limit_size(string)))
def assertContainedIf(self, value, string, condition):
if condition:
self.assertContained(value, string)
else:
self.assertNotContained(value, string)
def assertBinaryEqual(self, file1, file2):
self.assertEqual(os.path.getsize(file1),
os.path.getsize(file2))
self.assertEqual(read_binary(file1),
read_binary(file2))
library_cache = {}
def get_build_dir(self):
ret = os.path.join(self.get_dir(), 'building')
ensure_dir(ret)
return ret
def get_library(self, name, generated_libs, configure=['sh', './configure'],
configure_args=[], make=['make'], make_args=None,
env_init=None, cache_name_extra='', native=False):
if env_init is None:
env_init = {}
if make_args is None:
make_args = ['-j', str(shared.get_num_cores())]
build_dir = self.get_build_dir()
output_dir = self.get_dir()
# get_library() is used to compile libraries, and not link executables,
# so we don't want to pass linker flags here (emscripten warns if you
# try to pass linker settings when compiling).
emcc_args = self.get_emcc_args(ldflags=False)
hash_input = (str(emcc_args) + ' $ ' + str(env_init)).encode('utf-8')
cache_name = name + ','.join([opt for opt in emcc_args if len(opt) < 7]) + '_' + hashlib.md5(hash_input).hexdigest() + cache_name_extra
valid_chars = "_%s%s" % (string.ascii_letters, string.digits)
cache_name = ''.join([(c if c in valid_chars else '_') for c in cache_name])
if self.library_cache.get(cache_name):
print('<load %s from cache> ' % cache_name, file=sys.stderr)
generated_libs = []
for basename, contents in self.library_cache[cache_name]:
bc_file = os.path.join(build_dir, cache_name + '_' + basename)
write_binary(bc_file, contents)
generated_libs.append(bc_file)
return generated_libs
print(f'<building and saving {cache_name} into cache>', file=sys.stderr)
if configure is not None:
# Avoid += so we don't mutate the default arg
configure = configure + configure_args
cflags = ' '.join(emcc_args)
env_init.setdefault('CFLAGS', cflags)
env_init.setdefault('CXXFLAGS', cflags)
return build_library(name, build_dir, output_dir, generated_libs, configure,
make, make_args, self.library_cache,
cache_name, env_init=env_init, native=native)
def clear(self):
delete_contents(self.get_dir())
if EMSCRIPTEN_TEMP_DIR:
delete_contents(EMSCRIPTEN_TEMP_DIR)
def run_process(self, cmd, check=True, **args):
# Wrapper around shared.run_process. This is desirable so that the tests
# can fail (in the unittest sense) rather than error'ing.
# In the long run it would nice to completely remove the dependency on
# core emscripten code (shared.py) here.
try:
return shared.run_process(cmd, check=check, **args)
except subprocess.CalledProcessError as e:
if check and e.returncode != 0:
print(e.stdout)
print(e.stderr)
self.fail(f'subprocess exited with non-zero return code({e.returncode}): `{shared.shlex_join(cmd)}`')
def emcc(self, filename, args=[], output_filename=None, **kwargs):
cmd = [compiler_for(filename), filename] + self.get_emcc_args(ldflags='-c' not in args) + args
if output_filename:
cmd += ['-o', output_filename]
self.run_process(cmd, **kwargs)
# Shared test code between main suite and others
def expect_fail(self, cmd, **args):
"""Run a subprocess and assert that it returns non-zero.
Return the stderr of the subprocess.
"""
proc = self.run_process(cmd, check=False, stderr=PIPE, **args)
self.assertNotEqual(proc.returncode, 0, 'subprocess unexpectedly succeeded. stderr:\n' + proc.stderr)
# When we check for failure we expect a user-visible error, not a traceback.
# However, on windows a python traceback can happen randomly sometimes,
# due to "Access is denied" https://github.com/emscripten-core/emscripten/issues/718
if not WINDOWS or 'Access is denied' not in proc.stderr:
self.assertNotContained('Traceback', proc.stderr)
return proc.stderr
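# Added usage sketch (not part of the original file); the input file name and the
# asserted error text are illustrative only:
#
#   err = self.expect_fail([EMCC, 'file_that_does_not_exist.c'])
#   self.assertContained('error', err)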
# exercise the dynamic linker.
#
# test that linking to shared library B, which is linked to A, loads A as well.
# main is also linked to C, which is also linked to A. A is loaded/initialized only once.
#
# B
# main < > A
# C
#
# this test is used by both test_core and test_browser.
# when run under the browser it exercises how the dynamic linker handles concurrency
# - because B and C are loaded in parallel.
def _test_dylink_dso_needed(self, do_run):
create_file('liba.cpp', r'''
#include <stdio.h>
#include <emscripten.h>
static const char *afunc_prev;
extern "C" {
EMSCRIPTEN_KEEPALIVE void afunc(const char *s);
}
void afunc(const char *s) {
printf("a: %s (prev: %s)\n", s, afunc_prev);
afunc_prev = s;
}
struct ainit {
ainit() {
puts("a: loaded");
}
};
static ainit _;
''')
create_file('libb.c', r'''
#include <emscripten.h>
void afunc(const char *s);
EMSCRIPTEN_KEEPALIVE void bfunc() {
afunc("b");
}
''')
create_file('libc.c', r'''
#include <emscripten.h>
void afunc(const char *s);
EMSCRIPTEN_KEEPALIVE void cfunc() {
afunc("c");
}
''')
# _test_dylink_dso_needed can be potentially called several times by a test.
# reset dylink-related options first.
self.clear_setting('MAIN_MODULE')
self.clear_setting('SIDE_MODULE')
# XXX in wasm each lib load currently takes 5MB; default INITIAL_MEMORY=16MB is thus not enough
self.set_setting('INITIAL_MEMORY', '32mb')
so = '.wasm' if self.is_wasm() else '.js'
def ccshared(src, linkto=[]):
cmdv = [EMCC, src, '-o', shared.unsuffixed(src) + so, '-sSIDE_MODULE'] + self.get_emcc_args()
cmdv += linkto
self.run_process(cmdv)
ccshared('liba.cpp')
ccshared('libb.c', ['liba' + so])
ccshared('libc.c', ['liba' + so])
self.set_setting('MAIN_MODULE')
extra_args = ['-L.', 'libb' + so, 'libc' + so]
do_run(r'''
#ifdef __cplusplus
extern "C" {
#endif
void bfunc();
void cfunc();
#ifdef __cplusplus
}
#endif
int test_main() {
bfunc();
cfunc();
return 0;
}
''',
'a: loaded\na: b (prev: (null))\na: c (prev: b)\n', emcc_args=extra_args)
for libname in ['liba', 'libb', 'libc']:
self.emcc_args += ['--embed-file', libname + so]
do_run(r'''
#include <assert.h>
#include <dlfcn.h>
#include <stddef.h>
int test_main() {
void *bdso, *cdso;
void (*bfunc_ptr)(), (*cfunc_ptr)();
// FIXME for RTLD_LOCAL binding symbols to loaded lib is not currently working
bdso = dlopen("libb%(so)s", RTLD_NOW|RTLD_GLOBAL);
assert(bdso != NULL);
cdso = dlopen("libc%(so)s", RTLD_NOW|RTLD_GLOBAL);
assert(cdso != NULL);
bfunc_ptr = (void (*)())dlsym(bdso, "bfunc");
assert(bfunc_ptr != NULL);
cfunc_ptr = (void (*)())dlsym(cdso, "cfunc");
assert(cfunc_ptr != NULL);
bfunc_ptr();
cfunc_ptr();
return 0;
}
''' % locals(),
'a: loaded\na: b (prev: (null))\na: c (prev: b)\n')
def filtered_js_engines(self, js_engines=None):
if js_engines is None:
js_engines = self.js_engines
for engine in js_engines:
assert engine in config.JS_ENGINES, "js engine does not exist in config.JS_ENGINES"
assert type(engine) == list
for engine in self.banned_js_engines:
assert type(engine) in (list, type(None))
banned = [b[0] for b in self.banned_js_engines if b]
return [engine for engine in js_engines if engine and engine[0] not in banned]
def do_run(self, src, expected_output=None, force_c=False, **kwargs):
if 'no_build' in kwargs:
filename = src
else:
if force_c:
filename = 'src.c'
else:
filename = 'src.cpp'
write_file(filename, src)
return self._build_and_run(filename, expected_output, **kwargs)
def do_runf(self, filename, expected_output=None, **kwargs):
return self._build_and_run(filename, expected_output, **kwargs)
## Just like `do_run` but with filename of expected output
def do_run_from_file(self, filename, expected_output_filename, **kwargs):
return self._build_and_run(filename, read_file(expected_output_filename), **kwargs)
def do_run_in_out_file_test(self, *path, **kwargs):
srcfile = test_file(*path)
out_suffix = kwargs.pop('out_suffix', '')
outfile = shared.unsuffixed(srcfile) + out_suffix + '.out'
expected = read_file(outfile)
return self._build_and_run(srcfile, expected, **kwargs)
## Does a complete test - builds, runs, checks output, etc.
def _build_and_run(self, filename, expected_output, args=[], output_nicerizer=None,
no_build=False,
js_engines=None, libraries=[],
includes=[],
assert_returncode=0, assert_identical=False, assert_all=False,
check_for_error=True, force_c=False, emcc_args=[],
interleaved_output=True,
regex=False,
output_basename=None):
logger.debug(f'_build_and_run: {filename}')
if no_build:
js_file = filename
else:
js_file = self.build(filename, libraries=libraries, includes=includes,
force_c=force_c, emcc_args=emcc_args,
output_basename=output_basename)
self.assertExists(js_file)
engines = self.filtered_js_engines(js_engines)
if len(engines) > 1 and not self.use_all_engines:
engines = engines[:1]
# In standalone mode, also add wasm vms as we should be able to run there too.
if self.get_setting('STANDALONE_WASM'):
# TODO once standalone wasm support is more stable, apply use_all_engines
# like with js engines, but for now as we bring it up, test in all of them
if not self.wasm_engines:
logger.warning('no wasm engine was found to run the standalone part of this test')
engines += self.wasm_engines
if self.get_setting('WASM2C') and not EMTEST_LACKS_NATIVE_CLANG:
# compile the c file to a native executable.
c = shared.replace_suffix(js_file, '.wasm.c')
executable = shared.replace_suffix(js_file, '.exe')
cmd = [shared.CLANG_CC, c, '-o', executable] + clang_native.get_clang_native_args()
self.run_process(cmd, env=clang_native.get_clang_native_env())
# we can now run the executable directly, without an engine, which
# we indicate with None as the engine
engines += [[None]]
if len(engines) == 0:
self.skipTest('No JS engine present to run this test with. Check %s and the paths therein.' % config.EM_CONFIG)
for engine in engines:
js_output = self.run_js(js_file, engine, args,
output_nicerizer=output_nicerizer,
assert_returncode=assert_returncode,
interleaved_output=interleaved_output)
js_output = js_output.replace('\r\n', '\n')
if expected_output:
try:
if assert_identical:
self.assertIdentical(expected_output, js_output)
elif assert_all or len(expected_output) == 1:
for o in expected_output:
if regex:
self.assertTrue(re.search(o, js_output), 'Expected regex "%s" to match on:\n%s' % (regex, js_output))
else:
self.assertContained(o, js_output)
else:
if regex:
match_any = any(re.search(o, js_output) for o in expected_output)
self.assertTrue(match_any, 'Expected at least one of "%s" to match on:\n%s' % (expected_output, js_output))
else:
self.assertContained(expected_output, js_output)
if assert_returncode == 0 and check_for_error:
self.assertNotContained('ERROR', js_output)
except Exception:
print('(test did not pass in JS engine: %s)' % engine)
raise
return js_output
def get_freetype_library(self):
if '-Werror' in self.emcc_args:
self.emcc_args.remove('-Werror')
return self.get_library(os.path.join('third_party', 'freetype'), os.path.join('objs', '.libs', 'libfreetype.a'), configure_args=['--disable-shared', '--without-zlib'])
def get_poppler_library(self, env_init=None):
# The fontconfig symbols are all missing from the poppler build
# e.g. FcConfigSubstitute
self.set_setting('ERROR_ON_UNDEFINED_SYMBOLS', 0)
self.emcc_args += [
'-I' + test_file('third_party/freetype/include'),
'-I' + test_file('third_party/poppler/include')
]
freetype = self.get_freetype_library()
# Poppler has some pretty glaring warnings. Suppress them to keep the
# test output readable.
if '-Werror' in self.emcc_args:
self.emcc_args.remove('-Werror')
self.emcc_args += [
'-Wno-sentinel',
'-Wno-logical-not-parentheses',
'-Wno-unused-private-field',
'-Wno-tautological-compare',
'-Wno-unknown-pragmas',
]
env_init = env_init.copy() if env_init else {}
env_init['FONTCONFIG_CFLAGS'] = ' '
env_init['FONTCONFIG_LIBS'] = ' '
poppler = self.get_library(
os.path.join('third_party', 'poppler'),
[os.path.join('utils', 'pdftoppm.o'), os.path.join('utils', 'parseargs.o'), os.path.join('poppler', '.libs', 'libpoppler.a')],
env_init=env_init,
configure_args=['--disable-libjpeg', '--disable-libpng', '--disable-poppler-qt', '--disable-poppler-qt4', '--disable-cms', '--disable-cairo-output', '--disable-abiword-output', '--disable-shared'])
return poppler + freetype
def get_zlib_library(self, cmake):
assert cmake or not WINDOWS, 'on windows, get_zlib_library only supports cmake'
old_args = self.emcc_args.copy()
# inflate.c does -1L << 16
self.emcc_args.append('-Wno-shift-negative-value')
# adler32.c uses K&R style function declarations
self.emcc_args.append('-Wno-deprecated-non-prototype')
# Work around configure-script error. TODO: remove when
# https://github.com/emscripten-core/emscripten/issues/16908 is fixed
self.emcc_args.append('-Wno-pointer-sign')
if cmake:
rtn = self.get_library(os.path.join('third_party', 'zlib'), os.path.join('libz.a'),
configure=['cmake', '.'],
make=['cmake', '--build', '.', '--'],
make_args=[])
else:
rtn = self.get_library(os.path.join('third_party', 'zlib'), os.path.join('libz.a'), make_args=['libz.a'])
self.emcc_args = old_args
return rtn
# Run a server and a web page. When a test runs, we tell the server about it,
# which tells the web page, which then opens a window with the test. Doing
# it this way then allows the page to close() itself when done.
def harness_server_func(in_queue, out_queue, port):
class TestServerHandler(SimpleHTTPRequestHandler):
# Request header handler for default do_GET() path in
# SimpleHTTPRequestHandler.do_GET(self) below.
def send_head(self):
if self.path.endswith('.js'):
path = self.translate_path(self.path)
try:
f = open(path, 'rb')
except IOError:
self.send_error(404, "File not found: " + path)
return None
self.send_response(200)
self.send_header('Content-type', 'application/javascript')
self.send_header('Connection', 'close')
self.end_headers()
return f
else:
return SimpleHTTPRequestHandler.send_head(self)
# Add COOP, COEP, CORP, and no-caching headers
def end_headers(self):
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header('Cross-Origin-Opener-Policy', 'same-origin')
self.send_header('Cross-Origin-Embedder-Policy', 'require-corp')
self.send_header('Cross-Origin-Resource-Policy', 'cross-origin')
self.send_header('Cache-Control', 'no-cache, no-store, must-revalidate')
return SimpleHTTPRequestHandler.end_headers(self)
def do_GET(self):
if self.path == '/run_harness':
if DEBUG:
print('[server startup]')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(read_binary(test_file('browser_harness.html')))
elif 'report_' in self.path:
# the test is reporting its result. first change dir away from the
# test dir, as it will be deleted now that the test is finishing, and
# if we got a ping at that time, we'd return an error
os.chdir(path_from_root())
# for debugging, tests may encode the result and their own url (window.location) as result|url
if '|' in self.path:
path, url = self.path.split('|', 1)
else:
path = self.path
url = '?'
if DEBUG:
print('[server response:', path, url, ']')
if out_queue.empty():
out_queue.put(path)
else:
# a badly-behaving test may send multiple xhrs with reported results; we just care
# about the first (if we queued the others, they might be read as responses for
# later tests, or maybe the test sends more than one in a racy manner).
# we place 'None' in the queue here so that the outside knows something went wrong
# (none is not a valid value otherwise; and we need the outside to know because if we
# raise an error in here, it is just swallowed in python's webserver code - we want
# the test to actually fail, which a webserver response can't do).
out_queue.put(None)
raise Exception('browser harness error, excessive response to server - test must be fixed! "%s"' % self.path)
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.send_header('Cache-Control', 'no-cache, must-revalidate')
self.send_header('Connection', 'close')
self.send_header('Expires', '-1')
self.end_headers()
self.wfile.write(b'OK')
elif 'stdout=' in self.path or 'stderr=' in self.path:
'''
To get logging to the console from browser tests, add this to
print/printErr/the exception handler in src/shell.html:
var xhr = new XMLHttpRequest();
xhr.open('GET', encodeURI('http://localhost:8888?stdout=' + text));
xhr.send();
'''
print('[client logging:', unquote_plus(self.path), ']')
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
elif self.path == '/check':
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
if not in_queue.empty():
# there is a new test ready to be served
url, dir = in_queue.get()
if DEBUG:
print('[queue command:', url, dir, ']')
assert in_queue.empty(), 'should not be any blockage - one test runs at a time'
assert out_queue.empty(), 'the single response from the last test was read'
# tell the browser to load the test
self.wfile.write(b'COMMAND:' + url.encode('utf-8'))
# move us to the right place to serve the files for the new test
os.chdir(dir)
else:
# the browser must keep polling
self.wfile.write(b'(wait)')
else:
# Use SimpleHTTPServer default file serving operation for GET.
if DEBUG:
print('[simple HTTP serving:', unquote_plus(self.path), ']')
SimpleHTTPRequestHandler.do_GET(self)
def log_request(self, code=0, size=0):
# don't log; too noisy
pass
# allows streaming compilation to work
SimpleHTTPRequestHandler.extensions_map['.wasm'] = 'application/wasm'
httpd = HTTPServer(('localhost', port), TestServerHandler)
httpd.serve_forever() # test runner will kill us
class Reporting(Enum):
"""When running browser tests we normally automatically include support
code for reporting results back to the browser. This enum allows tests
to decide what type of support code they need/want.
"""
NONE = 0
# Include the JS helpers for reporting results
JS_ONLY = 1
# Include C/C++ reporting code (REPORT_RESULT macros) as well as JS helpers
FULL = 2
class BrowserCore(RunnerCore):
# note how many tests hang / do not send an output. if many of these
# happen, likely something is broken and it is best to abort the test
# suite early, as otherwise we will wait for the timeout on every
# single test (hundreds of minutes)
MAX_UNRESPONSIVE_TESTS = 10
unresponsive_tests = 0
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@staticmethod
def browser_open(url):
if not EMTEST_BROWSER:
logger.info('Using default system browser')
webbrowser.open_new(url)
return
browser_args = shlex.split(EMTEST_BROWSER)
# If the given browser is a scalar, treat it like one of the possible types
# from https://docs.python.org/2/library/webbrowser.html
if len(browser_args) == 1:
try:
# This throws if the type of browser isn't available
webbrowser.get(browser_args[0]).open_new(url)
logger.info('Using Emscripten browser: %s', browser_args[0])
return
except webbrowser.Error:
# Ignore the exception and fall back to the custom command logic
pass
# Else assume the given browser is a specific program with additional
# parameters and delegate to that
logger.info('Using Emscripten browser: %s', str(browser_args))
subprocess.Popen(browser_args + [url])
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.also_wasm2js = int(os.getenv('EMTEST_BROWSER_ALSO_WASM2JS', '0')) == 1
cls.port = int(os.getenv('EMTEST_BROWSER_PORT', '8888'))
if not has_browser() or EMTEST_BROWSER == 'node':
return
cls.browser_timeout = 60
cls.harness_in_queue = multiprocessing.Queue()
cls.harness_out_queue = multiprocessing.Queue()
cls.harness_server = multiprocessing.Process(target=harness_server_func, args=(cls.harness_in_queue, cls.harness_out_queue, cls.port))
cls.harness_server.start()
print('[Browser harness server on process %d]' % cls.harness_server.pid)
cls.browser_open('http://localhost:%s/run_harness' % cls.port)
@classmethod
def tearDownClass(cls):
super().tearDownClass()
if not has_browser() or EMTEST_BROWSER == 'node':
return
cls.harness_server.terminate()
print('[Browser harness server terminated]')
if WINDOWS:
# On Windows, shutil.rmtree() in tearDown() raises this exception if we do not wait a bit:
# WindowsError: [Error 32] The process cannot access the file because it is being used by another process.
time.sleep(0.1)
def assert_out_queue_empty(self, who):
if not self.harness_out_queue.empty():
while not self.harness_out_queue.empty():
self.harness_out_queue.get()
raise Exception('excessive responses from %s' % who)
# @param extra_tries: how many more times to try this test, if it fails. browser tests have
# many more causes of flakiness (in particular, they do not run
# synchronously, so we have a timeout, which can be hit if the VM
# we run on stalls temporarily), so we let each test try more than
# once by default
def run_browser(self, html_file, message, expectedResult=None, timeout=None, extra_tries=1):
if not has_browser():
return
if BrowserCore.unresponsive_tests >= BrowserCore.MAX_UNRESPONSIVE_TESTS:
self.skipTest('too many unresponsive tests, skipping browser launch - check your setup!')
self.assert_out_queue_empty('previous test')
if DEBUG:
print('[browser launch:', html_file, ']')
if expectedResult is not None:
try:
self.harness_in_queue.put((
'http://localhost:%s/%s' % (self.port, html_file),
self.get_dir()
))
received_output = False
output = '[no http server activity]'
start = time.time()
if timeout is None:
timeout = self.browser_timeout
while time.time() - start < timeout:
if not self.harness_out_queue.empty():
output = self.harness_out_queue.get()
received_output = True
break
time.sleep(0.1)
if not received_output:
BrowserCore.unresponsive_tests += 1
print('[unresponsive tests: %d]' % BrowserCore.unresponsive_tests)
if output is None:
# the browser harness reported an error already, and sent a None to tell
# us to also fail the test
raise Exception('failing test due to browser harness error')
if output.startswith('/report_result?skipped:'):
self.skipTest(unquote(output[len('/report_result?skipped:'):]).strip())
else:
# verify the result, and try again if we should do so
output = unquote(output)
try:
self.assertContained(expectedResult, output)
except Exception as e:
if extra_tries > 0:
print('[test error (see below), automatically retrying]')
print(e)
return self.run_browser(html_file, message, expectedResult, timeout, extra_tries - 1)
else:
raise e
finally:
time.sleep(0.1) # see comment about Windows above
self.assert_out_queue_empty('this test')
else:
webbrowser.open_new(os.path.abspath(html_file))
print('A web browser window should have opened a page containing the results of a part of this test.')
print('You need to manually look at the page to see that it works ok: ' + message)
print('(sleeping for a bit to keep the directory alive for the web browser..)')
time.sleep(5)
print('(moving on..)')
# @manually_trigger If set, we do not assume we should run the reftest when main() is done.
# Instead, call doReftest() in JS yourself at the right time.
def reftest(self, expected, manually_trigger=False):
# make sure the pngs used here have no color correction, using e.g.
# pngcrush -rem gAMA -rem cHRM -rem iCCP -rem sRGB infile outfile
basename = os.path.basename(expected)
shutil.copyfile(expected, os.path.join(self.get_dir(), basename))
reporting = read_file(test_file('browser_reporting.js'))
write_file('reftest.js', '''
function doReftest() {
if (doReftest.done) return;
doReftest.done = true;
var img = new Image();
img.onload = function() {
assert(img.width == Module.canvas.width, 'Invalid width: ' + Module.canvas.width + ', should be ' + img.width);
assert(img.height == Module.canvas.height, 'Invalid height: ' + Module.canvas.height + ', should be ' + img.height);
var canvas = document.createElement('canvas');
canvas.width = img.width;
canvas.height = img.height;
var ctx = canvas.getContext('2d');
ctx.drawImage(img, 0, 0);
var expected = ctx.getImageData(0, 0, img.width, img.height).data;
var actualUrl = Module.canvas.toDataURL();
var actualImage = new Image();
actualImage.onload = function() {
/*
document.body.appendChild(img); // for comparisons
var div = document.createElement('div');
div.innerHTML = '^=expected, v=actual';
document.body.appendChild(div);
document.body.appendChild(actualImage); // to grab it for creating the test reference
*/
var actualCanvas = document.createElement('canvas');
actualCanvas.width = actualImage.width;
actualCanvas.height = actualImage.height;
var actualCtx = actualCanvas.getContext('2d');
actualCtx.drawImage(actualImage, 0, 0);
var actual = actualCtx.getImageData(0, 0, actualImage.width, actualImage.height).data;
var total = 0;
var width = img.width;
var height = img.height;
for (var x = 0; x < width; x++) {
for (var y = 0; y < height; y++) {
total += Math.abs(expected[y*width*4 + x*4 + 0] - actual[y*width*4 + x*4 + 0]);
total += Math.abs(expected[y*width*4 + x*4 + 1] - actual[y*width*4 + x*4 + 1]);
total += Math.abs(expected[y*width*4 + x*4 + 2] - actual[y*width*4 + x*4 + 2]);
}
}
var wrong = Math.floor(total / (img.width*img.height*3)); // floor, to allow some margin of error for antialiasing
// If the main JS file is in a worker, or modularized, then we need to supply our own reporting logic.
if (typeof reportResultToServer === 'undefined') {
(function() {
%s
reportResultToServer(wrong);
})();
} else {
reportResultToServer(wrong);
}
};
actualImage.src = actualUrl;
}
img.src = '%s';
};
/** @suppress {uselessCode} */
function setupRefTest() {
// Automatically trigger the reftest?
if (!%s) {
// Yes, automatically
Module['postRun'] = doReftest;
if (typeof WebGLClient !== 'undefined') {
// trigger reftest from RAF as well, needed for workers where there is no pre|postRun on the main thread
var realRAF = window.requestAnimationFrame;
/** @suppress{checkTypes} */
window.requestAnimationFrame = function(func) {
return realRAF(function() {
func();
realRAF(doReftest);
});
};
// trigger reftest from canvas render too, for workers not doing GL
var realWOM = worker.onmessage;
worker.onmessage = function(event) {
realWOM(event);
if (event.data.target === 'canvas' && event.data.op === 'render') {
realRAF(doReftest);
}
};
}
} else {
// Manually trigger the reftest.
// The user will call it.
// Add an event loop iteration to ensure rendering, so users don't need to bother.
var realDoReftest = doReftest;
doReftest = function() {
setTimeout(realDoReftest, 1);
};
}
}
setupRefTest();
''' % (reporting, basename, int(manually_trigger)))
def compile_btest(self, args, reporting=Reporting.FULL):
# Inject support code for reporting results. This adds an include of a header so test cases can
# use REPORT_RESULT, and also adds a cpp file to be compiled alongside the testcase, which
# contains the implementation of REPORT_RESULT (we can't just include that implementation in
# the header as there may be multiple files being compiled here).
args += ['-sIN_TEST_HARNESS']
if reporting != Reporting.NONE:
# For basic reporting we inject JS helper functions to report the result back to the server.
args += ['-DEMTEST_PORT_NUMBER=%d' % self.port,
'--pre-js', test_file('browser_reporting.js')]
if reporting == Reporting.FULL:
# If C reporting (i.e. REPORT_RESULT macro) is required
# also compile in report_result.c and force-include report_result.h
args += ['-I' + TEST_ROOT,
'-include', test_file('report_result.h'),
test_file('report_result.c')]
if EMTEST_BROWSER == 'node':
args.append('-DEMTEST_NODE')
self.run_process([EMCC] + self.get_emcc_args() + args)
def btest_exit(self, filename, assert_returncode=0, *args, **kwargs):
"""Special case of btest that reports its result solely via exiting
with a given result code.
In this case we set EXIT_RUNTIME and we don't need to provide the
REPORT_RESULT macro to the C code.
"""
self.set_setting('EXIT_RUNTIME')
assert('reporting' not in kwargs)
assert('expected' not in kwargs)
kwargs['reporting'] = Reporting.JS_ONLY
kwargs['expected'] = 'exit:%d' % assert_returncode
return self.btest(filename, *args, **kwargs)
def btest(self, filename, expected=None, reference=None,
reference_slack=0, manual_reference=False, post_build=None,
args=None, message='.', also_proxied=False,
url_suffix='', timeout=None, also_wasm2js=False,
manually_trigger_reftest=False, extra_tries=1,
reporting=Reporting.FULL):
assert expected or reference, 'a btest must either expect an output, or have a reference image'
if args is None:
args = []
original_args = args
args = args.copy()
if not os.path.exists(filename):
filename = test_file(filename)
if reference:
self.reference = reference
expected = [str(i) for i in range(0, reference_slack + 1)]
self.reftest(test_file(reference), manually_trigger=manually_trigger_reftest)
if not manual_reference:
args += ['--pre-js', 'reftest.js', '-sGL_TESTING']
outfile = 'test.html'
args += [filename, '-o', outfile]
# print('all args:', args)
try_delete(outfile)
self.compile_btest(args, reporting=reporting)
self.assertExists(outfile)
if post_build:
post_build()
if not isinstance(expected, list):
expected = [expected]
if EMTEST_BROWSER == 'node':
self.js_engines = [config.NODE_JS]
self.node_args += ['--experimental-wasm-threads', '--experimental-wasm-bulk-memory']
output = self.run_js('test.js')
self.assertContained('RESULT: ' + expected[0], output)
else:
self.run_browser(outfile + url_suffix, message, ['/report_result?' + e for e in expected], timeout=timeout, extra_tries=extra_tries)
# Tests can opt into being run under asmjs as well
if 'WASM=0' not in original_args and (also_wasm2js or self.also_wasm2js):
print('WASM=0')
self.btest(filename, expected, reference, reference_slack, manual_reference, post_build,
original_args + ['-sWASM=0'], message, also_proxied=False, timeout=timeout)
if also_proxied:
print('proxied...')
if reference:
assert not manual_reference
manual_reference = True
assert not post_build
post_build = self.post_manual_reftest
# run proxied
self.btest(filename, expected, reference, reference_slack, manual_reference, post_build,
original_args + ['--proxy-to-worker', '-sGL_TESTING'], message, timeout=timeout)
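# Editor's note: an illustrative sketch only: a reference-image (reftest) based
# test, assuming hypothetical 'draw_triangle.c' / 'draw_triangle.png' files and
# illustrative link flags. reference_slack tolerates a small average per-pixel
# difference, e.g. from antialiasing.
def test_draw_triangle_example(self):
    self.btest('draw_triangle.c', reference='draw_triangle.png',
               reference_slack=1, args=['-lGL', '-lSDL'])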
###################################################################################################
def build_library(name,
build_dir,
output_dir,
generated_libs,
configure,
make,
make_args=[],
cache=None,
cache_name=None,
env_init={},
native=False):
"""Build a library and cache the result. We build the library file
once and cache it for all our tests. (We cache in memory since the test
directory is destroyed and recreated for each test. Note that we cache
separately for different compilers). This cache is just during the test
runner. There is a different concept of caching as well, see |Cache|.
"""
if type(generated_libs) is not list:
generated_libs = [generated_libs]
source_dir = test_file(name.replace('_native', ''))
project_dir = Path(build_dir, name)
if os.path.exists(project_dir):
shutil.rmtree(project_dir)
# Sometimes useful when debugging to comment this out (and the rmtree two lines above)
shutil.copytree(source_dir, project_dir)
generated_libs = [os.path.join(project_dir, lib) for lib in generated_libs]
if native:
env = clang_native.get_clang_native_env()
else:
env = os.environ.copy()
env.update(env_init)
if not native:
# Inject emcmake, emconfigure or emmake accordingly, but only if we are
# cross compiling.
if configure:
if configure[0] == 'cmake':
configure = [EMCMAKE] + configure
else:
configure = [EMCONFIGURE] + configure
else:
make = [EMMAKE] + make
if configure:
try:
with open(os.path.join(project_dir, 'configure_out'), 'w') as out:
with open(os.path.join(project_dir, 'configure_err'), 'w') as err:
stdout = out if EMTEST_BUILD_VERBOSE < 2 else None
stderr = err if EMTEST_BUILD_VERBOSE < 1 else None
shared.run_process(configure, env=env, stdout=stdout, stderr=stderr,
cwd=project_dir)
except subprocess.CalledProcessError:
print('-- configure stdout --')
print(read_file(Path(project_dir, 'configure_out')))
print('-- end configure stdout --')
print('-- configure stderr --')
print(read_file(Path(project_dir, 'configure_err')))
print('-- end configure stderr --')
raise
# if we run configure or cmake we don't then need any kind
# of special env when we run make below
env = None
def open_make_out(mode='r'):
return open(os.path.join(project_dir, 'make.out'), mode)
def open_make_err(mode='r'):
return open(os.path.join(project_dir, 'make.err'), mode)
if EMTEST_BUILD_VERBOSE >= 3:
make_args += ['VERBOSE=1']
try:
with open_make_out('w') as make_out:
with open_make_err('w') as make_err:
stdout = make_out if EMTEST_BUILD_VERBOSE < 2 else None
stderr = make_err if EMTEST_BUILD_VERBOSE < 1 else None
shared.run_process(make + make_args, stdout=stdout, stderr=stderr, env=env,
cwd=project_dir)
except subprocess.CalledProcessError:
with open_make_out() as f:
print('-- make stdout --')
print(f.read())
print('-- end make stdout --')
with open_make_err() as f:
print('-- make stderr --')
print(f.read())
print('-- end stderr --')
raise
if cache is not None:
cache[cache_name] = []
for f in generated_libs:
basename = os.path.basename(f)
cache[cache_name].append((basename, read_binary(f)))
return generated_libs
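# Editor's note: a hedged sketch of how build_library might be called; the
# project name, configure/make commands and the shared cache dict are
# assumptions for illustration only.
_example_library_cache = {}

def _build_example_library(build_dir, output_dir):
    return build_library('libexample', build_dir, output_dir,
                         generated_libs=['libexample.a'],
                         configure=['sh', './configure'],
                         make=['make'],
                         make_args=['-j2'],
                         cache=_example_library_cache,
                         cache_name='libexample-emscripten')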
| 38.013669
| 205
| 0.649129
|
4e2d67e3d315abd4c0cab5613b38c6e46b42b91a
| 6,945
|
py
|
Python
|
qcodes/instrument/ip.py
|
aaroncslau/Qcodes
|
ba1920198614e0923fbc046efcec2effb36db8f2
|
[
"MIT"
] | 2
|
2019-02-14T00:07:06.000Z
|
2021-03-30T03:38:06.000Z
|
qcodes/instrument/ip.py
|
qdev-dk/Qcodes
|
f587f0c4dd74271b3b9156167fbff3dceb2d185d
|
[
"MIT"
] | 22
|
2017-02-08T08:37:23.000Z
|
2017-11-24T14:18:20.000Z
|
qcodes/instrument/ip.py
|
aaroncslau/Qcodes
|
ba1920198614e0923fbc046efcec2effb36db8f2
|
[
"MIT"
] | 6
|
2017-03-31T21:01:08.000Z
|
2019-08-20T09:25:22.000Z
|
"""Ethernet instrument driver class based on sockets."""
import socket
import logging
from .base import Instrument
log = logging.getLogger(__name__)
class IPInstrument(Instrument):
r"""
Bare socket ethernet instrument implementation.
Args:
name (str): What this instrument is called locally.
address (Optional[str]): The IP address or name. If not given on
construction, must be provided before any communication.
port (Optional[int]): The IP port. If not given on construction, must
be provided before any communication.
timeout (number): Seconds to allow for responses. Default 5.
terminator (str): Character(s) to terminate each send. Default '\n'.
persistent (bool): Whether to leave the socket open between calls.
Default True.
write_confirmation (bool): Whether the instrument acknowledges writes
with some response we should read. Default True.
metadata (Optional[Dict]): additional static metadata to add to this
instrument's JSON snapshot.
See help for ``qcodes.Instrument`` for additional information on writing
instrument subclasses.
"""
def __init__(self, name, address=None, port=None, timeout=5,
terminator='\n', persistent=True, write_confirmation=True, testing=False,
**kwargs):
super().__init__(name, testing=testing, **kwargs)
self._address = address
self._port = port
self._timeout = timeout
self._terminator = terminator
self._confirmation = write_confirmation
self._ensure_connection = EnsureConnection(self)
self._buffer_size = 1400
self._socket = None
self.set_persistent(persistent)
def set_address(self, address=None, port=None):
"""
Change the IP address and/or port of this instrument.
Args:
address (Optional[str]): The IP address or name.
port (Optional[number]): The IP port.
"""
if address is not None:
self._address = address
elif not hasattr(self, '_address'):
raise TypeError('This instrument doesn\'t have an address yet, '
'you must provide one.')
if port is not None:
self._port = port
elif not hasattr(self, '_port'):
raise TypeError('This instrument doesn\'t have a port yet, '
'you must provide one.')
self._disconnect()
self.set_persistent(self._persistent)
def set_persistent(self, persistent):
"""
Change whether this instrument keeps its socket open between calls.
Args:
persistent (bool): Set True to keep the socket open all the time.
"""
self._persistent = persistent
if persistent:
self._connect()
else:
self._disconnect()
def flush_connection(self):
if not self._testing:
self._recv()
def _connect(self):
if self._testing:
return
if self._socket is not None:
self._disconnect()
try:
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.connect((self._address, self._port))
self.set_timeout(self._timeout)
except ConnectionRefusedError:
self._socket.close()
self._socket = None
def _disconnect(self):
if getattr(self, '_socket', None) is None:
return
self._socket.shutdown(socket.SHUT_RDWR)
self._socket.close()
self._socket = None
def set_timeout(self, timeout=None):
"""
Change the read timeout for the socket.
Args:
timeout (number): Seconds to allow for responses.
"""
self._timeout = timeout
if self._socket is not None:
self._socket.settimeout(float(self._timeout))
def set_terminator(self, terminator):
r"""
Change the write terminator to use.
Args:
terminator (str): Character(s) to terminate each send.
Default '\n'.
"""
self._terminator = terminator
def _send(self, cmd):
data = cmd + self._terminator
self._socket.sendall(data.encode())
def _recv(self):
result = self._socket.recv(self._buffer_size)
if result == b'':
log.warning("Got empty response from Socket recv() "
"Connection broken.")
return result.decode()
def close(self):
"""Disconnect and irreversibly tear down the instrument."""
self._disconnect()
super().close()
def write_raw(self, cmd):
"""
Low-level interface to send a command that gets no response.
Args:
cmd (str): The command to send to the instrument.
"""
with self._ensure_connection:
self._send(cmd)
if self._confirmation:
self._recv()
def ask_raw(self, cmd):
"""
Low-level interface to send a command and read a response.
Args:
cmd (str): The command to send to the instrument.
Returns:
str: The instrument's response.
"""
with self._ensure_connection:
self._send(cmd)
return self._recv()
def __del__(self):
self.close()
def snapshot_base(self, update=False):
"""
State of the instrument as a JSON-compatible dict.
Args:
update (bool): If True, update the state by querying the
instrument. If False, just use the latest values in memory.
Returns:
dict: base snapshot
"""
snap = super().snapshot_base(update=update)
snap['port'] = self._port
snap['confirmation'] = self._confirmation
snap['address'] = self._address
snap['terminator'] = self._terminator
snap['timeout'] = self._timeout
snap['persistent'] = self._persistent
return snap
class EnsureConnection:
"""
Context manager to ensure an instrument is connected when needed.
Uses ``instrument._persistent`` to determine whether or not to close
the connection immediately on completion.
Args:
instrument (IPInstrument): the instance to connect.
"""
def __init__(self, instrument):
self.instrument = instrument
def __enter__(self):
"""Make sure we connect when entering the context."""
if not self.instrument._persistent or self.instrument._socket is None:
self.instrument._connect()
def __exit__(self, type, value, tb):
"""Possibly disconnect on exiting the context."""
if not self.instrument._persistent:
self.instrument._disconnect()
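# Editor's note: a minimal usage sketch, assuming a hypothetical SCPI-style
# device listening on 192.0.2.10:5025; the '*IDN?' and '*RST' commands and the
# behaviour of the qcodes Instrument base class are assumptions made for
# illustration only.
def _example_ip_instrument_usage():
    instr = IPInstrument('demo', address='192.0.2.10', port=5025,
                         timeout=5, terminator='\n')
    idn = instr.ask_raw('*IDN?')   # send a query and read the reply
    instr.write_raw('*RST')        # send a command (reads the ack if enabled)
    instr.close()
    return idn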
| 29.553191
| 90
| 0.59856
|
c914079e0a8d97bedba7283f07874a1f539f77a4
| 4,599
|
py
|
Python
|
data_structures/binary_tree/non_recursive_segment_tree.py
|
NavpreetDevpuri/Python
|
7ef5ae66d777e8ed702993c6aa9270e0669cb0c6
|
[
"MIT"
] | 13
|
2021-03-11T00:25:22.000Z
|
2022-03-19T00:19:23.000Z
|
data_structures/binary_tree/non_recursive_segment_tree.py
|
Agha-Muqarib/Python
|
04f156a8973d6156a4357e0717d9eb0aa264d086
|
[
"MIT"
] | 279
|
2020-02-12T20:51:09.000Z
|
2021-07-20T11:25:19.000Z
|
data_structures/binary_tree/non_recursive_segment_tree.py
|
Agha-Muqarib/Python
|
04f156a8973d6156a4357e0717d9eb0aa264d086
|
[
"MIT"
] | 12
|
2021-04-26T19:43:01.000Z
|
2022-01-31T08:36:29.000Z
|
"""
A non-recursive Segment Tree implementation with range query and single element update,
works virtually with any list of the same type of elements with a "commutative"
combiner.
Explanation:
https://www.geeksforgeeks.org/iterative-segment-tree-range-minimum-query/
https://www.geeksforgeeks.org/segment-tree-efficient-implementation/
>>> SegmentTree([1, 2, 3], lambda a, b: a + b).query(0, 2)
6
>>> SegmentTree([3, 1, 2], min).query(0, 2)
1
>>> SegmentTree([2, 3, 1], max).query(0, 2)
3
>>> st = SegmentTree([1, 5, 7, -1, 6], lambda a, b: a + b)
>>> st.update(1, -1)
>>> st.update(2, 3)
>>> st.query(1, 2)
2
>>> st.query(1, 1)
-1
>>> st.update(4, 1)
>>> st.query(3, 4)
0
>>> st = SegmentTree([[1, 2, 3], [3, 2, 1], [1, 1, 1]], lambda a, b: [a[i] + b[i] for i
... in range(len(a))])
>>> st.query(0, 1)
[4, 4, 4]
>>> st.query(1, 2)
[4, 3, 2]
>>> st.update(1, [-1, -1, -1])
>>> st.query(1, 2)
[0, 0, 0]
>>> st.query(0, 2)
[1, 2, 3]
"""
from __future__ import annotations
from typing import Callable, TypeVar
T = TypeVar("T")
class SegmentTree:
def __init__(self, arr: list[T], fnc: Callable[[T, T], T]) -> None:
"""
Segment Tree constructor; it works only with a commutative combiner.
:param arr: list of elements for the segment tree
:param fnc: commutative function for combining two elements
>>> SegmentTree(['a', 'b', 'c'], lambda a, b: f'{a}{b}').query(0, 2)
'abc'
>>> SegmentTree([(1, 2), (2, 3), (3, 4)],
... lambda a, b: (a[0] + b[0], a[1] + b[1])).query(0, 2)
(6, 9)
"""
self.N = len(arr)
self.st = [None for _ in range(len(arr))] + arr
self.fn = fnc
self.build()
def build(self) -> None:
for p in range(self.N - 1, 0, -1):
self.st[p] = self.fn(self.st[p * 2], self.st[p * 2 + 1])
def update(self, p: int, v: T) -> None:
"""
Update an element in log(N) time
:param p: position to be updated
:param v: new value
>>> st = SegmentTree([3, 1, 2, 4], min)
>>> st.query(0, 3)
1
>>> st.update(2, -1)
>>> st.query(0, 3)
-1
"""
p += self.N
self.st[p] = v
while p > 1:
p = p // 2
self.st[p] = self.fn(self.st[p * 2], self.st[p * 2 + 1])
def query(self, l: int, r: int) -> T: # noqa: E741
"""
Get range query value in log(N) time
:param l: left element index
:param r: right element index
:return: element combined in the range [l, r]
>>> st = SegmentTree([1, 2, 3, 4], lambda a, b: a + b)
>>> st.query(0, 2)
6
>>> st.query(1, 2)
5
>>> st.query(0, 3)
10
>>> st.query(2, 3)
7
"""
l, r = l + self.N, r + self.N # noqa: E741
res = None
while l <= r: # noqa: E741
if l % 2 == 1:
res = self.st[l] if res is None else self.fn(res, self.st[l])
if r % 2 == 0:
res = self.st[r] if res is None else self.fn(res, self.st[r])
l, r = (l + 1) // 2, (r - 1) // 2
return res
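# Editor's note: a short illustration of the flat layout used above, assuming
# the sum combiner. Leaves live at indices N..2N-1 of self.st and the parent of
# node p is p // 2, which is exactly what build(), update() and query() exploit.
def _layout_example():
    st = SegmentTree([1, 2, 3, 4], lambda a, b: a + b)
    assert st.st == [None, 10, 3, 7, 1, 2, 3, 4]  # [unused, root, internal, internal, leaves...]
    assert st.query(1, 2) == 5  # combines the leaves stored at indices 5 and 6
    return st.st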
if __name__ == "__main__":
from functools import reduce
test_array = [1, 10, -2, 9, -3, 8, 4, -7, 5, 6, 11, -12]
test_updates = {
0: 7,
1: 2,
2: 6,
3: -14,
4: 5,
5: 4,
6: 7,
7: -10,
8: 9,
9: 10,
10: 12,
11: 1,
}
min_segment_tree = SegmentTree(test_array, min)
max_segment_tree = SegmentTree(test_array, max)
sum_segment_tree = SegmentTree(test_array, lambda a, b: a + b)
def test_all_segments():
"""
Test all possible segments
"""
for i in range(len(test_array)):
for j in range(i, len(test_array)):
min_range = reduce(min, test_array[i : j + 1])
max_range = reduce(max, test_array[i : j + 1])
sum_range = reduce(lambda a, b: a + b, test_array[i : j + 1])
assert min_range == min_segment_tree.query(i, j)
assert max_range == max_segment_tree.query(i, j)
assert sum_range == sum_segment_tree.query(i, j)
test_all_segments()
for index, value in test_updates.items():
test_array[index] = value
min_segment_tree.update(index, value)
max_segment_tree.update(index, value)
sum_segment_tree.update(index, value)
test_all_segments()
| 28.924528
| 88
| 0.499674
|
ac734533ba44864e72f4305dceb211fc88708a98
| 46,394
|
py
|
Python
|
_resource/python/ops/gen_io_ops.py
|
amlyj/tensorflowStudy
|
1e3a4b15a57d53e746fd730af540da4be471c70b
|
[
"MIT"
] | 1
|
2021-04-08T17:05:42.000Z
|
2021-04-08T17:05:42.000Z
|
_resource/python/ops/gen_io_ops.py
|
amlyj/tensorflowStudy
|
1e3a4b15a57d53e746fd730af540da4be471c70b
|
[
"MIT"
] | 3
|
2022-02-13T22:40:04.000Z
|
2022-02-27T11:06:59.000Z
|
_resource/python/ops/gen_io_ops.py
|
amlyj/tensorflowStudy
|
1e3a4b15a57d53e746fd730af540da4be471c70b
|
[
"MIT"
] | 1
|
2021-05-20T00:39:38.000Z
|
2021-05-20T00:39:38.000Z
|
"""Python wrappers around Brain.
This file is MACHINE GENERATED! Do not edit.
"""
import collections as _collections
from google.protobuf import text_format as _text_format
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
__fixed_length_record_reader_outputs = ["reader_handle"]
def _fixed_length_record_reader(record_bytes, header_bytes=None,
footer_bytes=None, container=None,
shared_name=None, name=None):
r"""A Reader that outputs fixed-length records from a file.
Args:
record_bytes: An `int`.
header_bytes: An optional `int`. Defaults to `0`.
footer_bytes: An optional `int`. Defaults to `0`.
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type mutable `string`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("FixedLengthRecordReader",
record_bytes=record_bytes,
header_bytes=header_bytes,
footer_bytes=footer_bytes,
container=container, shared_name=shared_name,
name=name)
return result
__fixed_length_record_reader_v2_outputs = ["reader_handle"]
def _fixed_length_record_reader_v2(record_bytes, header_bytes=None,
footer_bytes=None, container=None,
shared_name=None, name=None):
r"""A Reader that outputs fixed-length records from a file.
Args:
record_bytes: An `int`.
header_bytes: An optional `int`. Defaults to `0`.
footer_bytes: An optional `int`. Defaults to `0`.
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `resource`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("FixedLengthRecordReaderV2",
record_bytes=record_bytes,
header_bytes=header_bytes,
footer_bytes=footer_bytes,
container=container, shared_name=shared_name,
name=name)
return result
__identity_reader_outputs = ["reader_handle"]
def _identity_reader(container=None, shared_name=None, name=None):
r"""A Reader that outputs the queued work as both the key and value.
To use, enqueue strings in a Queue. ReaderRead will take the front
work string and output (work, work).
Args:
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type mutable `string`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("IdentityReader", container=container,
shared_name=shared_name, name=name)
return result
__identity_reader_v2_outputs = ["reader_handle"]
def _identity_reader_v2(container=None, shared_name=None, name=None):
r"""A Reader that outputs the queued work as both the key and value.
To use, enqueue strings in a Queue. ReaderRead will take the front
work string and output (work, work).
Args:
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `resource`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("IdentityReaderV2", container=container,
shared_name=shared_name, name=name)
return result
_matching_files_outputs = ["filenames"]
def matching_files(pattern, name=None):
r"""Returns the set of files matching one or more glob patterns.
Note that this routine only supports wildcard characters in the
basename portion of the pattern, not in the directory portion.
Args:
pattern: A `Tensor` of type `string`.
Shell wildcard pattern(s). Scalar or vector of type string.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`. A vector of matching filenames.
"""
result = _op_def_lib.apply_op("MatchingFiles", pattern=pattern, name=name)
return result
_merge_v2_checkpoints_outputs = [""]
def merge_v2_checkpoints(checkpoint_prefixes, destination_prefix,
delete_old_dirs=None, name=None):
r"""V2 format specific: merges the metadata files of sharded checkpoints. The
result is one logical checkpoint, with one physical metadata file and renamed
data files.
Intended for "grouping" multiple checkpoints in a sharded checkpoint setup.
If delete_old_dirs is true, attempts to delete recursively the dirname of each
path in the input checkpoint_prefixes. This is useful when those paths are non
user-facing temporary locations.
Args:
checkpoint_prefixes: A `Tensor` of type `string`.
prefixes of V2 checkpoints to merge.
destination_prefix: A `Tensor` of type `string`.
scalar. The desired final prefix. Allowed to be the same
as one of the checkpoint_prefixes.
delete_old_dirs: An optional `bool`. Defaults to `True`. see above.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("MergeV2Checkpoints",
checkpoint_prefixes=checkpoint_prefixes,
destination_prefix=destination_prefix,
delete_old_dirs=delete_old_dirs, name=name)
return result
_read_file_outputs = ["contents"]
def read_file(filename, name=None):
r"""Reads and outputs the entire contents of the input filename.
Args:
filename: A `Tensor` of type `string`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`.
"""
result = _op_def_lib.apply_op("ReadFile", filename=filename, name=name)
return result
__reader_num_records_produced_outputs = ["records_produced"]
def _reader_num_records_produced(reader_handle, name=None):
r"""Returns the number of records this Reader has produced.
This is the same as the number of ReaderRead executions that have
succeeded.
Args:
reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `int64`.
"""
result = _op_def_lib.apply_op("ReaderNumRecordsProduced",
reader_handle=reader_handle, name=name)
return result
__reader_num_records_produced_v2_outputs = ["records_produced"]
def _reader_num_records_produced_v2(reader_handle, name=None):
r"""Returns the number of records this Reader has produced.
This is the same as the number of ReaderRead executions that have
succeeded.
Args:
reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `int64`.
"""
result = _op_def_lib.apply_op("ReaderNumRecordsProducedV2",
reader_handle=reader_handle, name=name)
return result
__reader_num_work_units_completed_outputs = ["units_completed"]
def _reader_num_work_units_completed(reader_handle, name=None):
r"""Returns the number of work units this Reader has finished processing.
Args:
reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `int64`.
"""
result = _op_def_lib.apply_op("ReaderNumWorkUnitsCompleted",
reader_handle=reader_handle, name=name)
return result
__reader_num_work_units_completed_v2_outputs = ["units_completed"]
def _reader_num_work_units_completed_v2(reader_handle, name=None):
r"""Returns the number of work units this Reader has finished processing.
Args:
reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `int64`.
"""
result = _op_def_lib.apply_op("ReaderNumWorkUnitsCompletedV2",
reader_handle=reader_handle, name=name)
return result
__reader_read_outputs = ["key", "value"]
_ReaderReadOutput = _collections.namedtuple("ReaderRead",
__reader_read_outputs)
def _reader_read(reader_handle, queue_handle, name=None):
r"""Returns the next record (key, value pair) produced by a Reader.
Will dequeue from the input queue if necessary (e.g. when the
Reader needs to start reading from a new file since it has finished
with the previous file).
Args:
reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
queue_handle: A `Tensor` of type mutable `string`.
Handle to a Queue, with string work items.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (key, value).
key: A `Tensor` of type `string`. A scalar.
value: A `Tensor` of type `string`. A scalar.
"""
result = _op_def_lib.apply_op("ReaderRead", reader_handle=reader_handle,
queue_handle=queue_handle, name=name)
return _ReaderReadOutput._make(result)
__reader_read_up_to_outputs = ["keys", "values"]
_ReaderReadUpToOutput = _collections.namedtuple("ReaderReadUpTo",
__reader_read_up_to_outputs)
def _reader_read_up_to(reader_handle, queue_handle, num_records, name=None):
r"""Returns up to `num_records` (key, value) pairs produced by a Reader.
Will dequeue from the input queue if necessary (e.g. when the
Reader needs to start reading from a new file since it has finished
with the previous file).
It may return less than `num_records` even before the last batch.
Args:
reader_handle: A `Tensor` of type mutable `string`. Handle to a `Reader`.
queue_handle: A `Tensor` of type mutable `string`.
Handle to a `Queue`, with string work items.
num_records: A `Tensor` of type `int64`.
number of records to read from `Reader`.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (keys, values).
keys: A `Tensor` of type `string`. A 1-D tensor.
values: A `Tensor` of type `string`. A 1-D tensor.
"""
result = _op_def_lib.apply_op("ReaderReadUpTo", reader_handle=reader_handle,
queue_handle=queue_handle,
num_records=num_records, name=name)
return _ReaderReadUpToOutput._make(result)
__reader_read_up_to_v2_outputs = ["keys", "values"]
_ReaderReadUpToV2Output = _collections.namedtuple("ReaderReadUpToV2",
__reader_read_up_to_v2_outputs)
def _reader_read_up_to_v2(reader_handle, queue_handle, num_records,
name=None):
r"""Returns up to `num_records` (key, value) pairs produced by a Reader.
Will dequeue from the input queue if necessary (e.g. when the
Reader needs to start reading from a new file since it has finished
with the previous file).
It may return less than `num_records` even before the last batch.
Args:
reader_handle: A `Tensor` of type `resource`. Handle to a `Reader`.
queue_handle: A `Tensor` of type `resource`.
Handle to a `Queue`, with string work items.
num_records: A `Tensor` of type `int64`.
number of records to read from `Reader`.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (keys, values).
keys: A `Tensor` of type `string`. A 1-D tensor.
values: A `Tensor` of type `string`. A 1-D tensor.
"""
result = _op_def_lib.apply_op("ReaderReadUpToV2",
reader_handle=reader_handle,
queue_handle=queue_handle,
num_records=num_records, name=name)
return _ReaderReadUpToV2Output._make(result)
__reader_read_v2_outputs = ["key", "value"]
_ReaderReadV2Output = _collections.namedtuple("ReaderReadV2",
__reader_read_v2_outputs)
def _reader_read_v2(reader_handle, queue_handle, name=None):
r"""Returns the next record (key, value pair) produced by a Reader.
Will dequeue from the input queue if necessary (e.g. when the
Reader needs to start reading from a new file since it has finished
with the previous file).
Args:
reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
queue_handle: A `Tensor` of type `resource`.
Handle to a Queue, with string work items.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (key, value).
key: A `Tensor` of type `string`. A scalar.
value: A `Tensor` of type `string`. A scalar.
"""
result = _op_def_lib.apply_op("ReaderReadV2", reader_handle=reader_handle,
queue_handle=queue_handle, name=name)
return _ReaderReadV2Output._make(result)
__reader_reset_outputs = [""]
def _reader_reset(reader_handle, name=None):
r"""Restore a Reader to its initial clean state.
Args:
reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("ReaderReset", reader_handle=reader_handle,
name=name)
return result
__reader_reset_v2_outputs = [""]
def _reader_reset_v2(reader_handle, name=None):
r"""Restore a Reader to its initial clean state.
Args:
reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("ReaderResetV2", reader_handle=reader_handle,
name=name)
return result
__reader_restore_state_outputs = [""]
def _reader_restore_state(reader_handle, state, name=None):
r"""Restore a reader to a previously saved state.
Not all Readers support being restored, so this can produce an
Unimplemented error.
Args:
reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
state: A `Tensor` of type `string`.
Result of a ReaderSerializeState of a Reader with type
matching reader_handle.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("ReaderRestoreState",
reader_handle=reader_handle, state=state,
name=name)
return result
__reader_restore_state_v2_outputs = [""]
def _reader_restore_state_v2(reader_handle, state, name=None):
r"""Restore a reader to a previously saved state.
Not all Readers support being restored, so this can produce an
Unimplemented error.
Args:
reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
state: A `Tensor` of type `string`.
Result of a ReaderSerializeState of a Reader with type
matching reader_handle.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("ReaderRestoreStateV2",
reader_handle=reader_handle, state=state,
name=name)
return result
__reader_serialize_state_outputs = ["state"]
def _reader_serialize_state(reader_handle, name=None):
r"""Produce a string tensor that encodes the state of a Reader.
Not all Readers support being serialized, so this can produce an
Unimplemented error.
Args:
reader_handle: A `Tensor` of type mutable `string`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`.
"""
result = _op_def_lib.apply_op("ReaderSerializeState",
reader_handle=reader_handle, name=name)
return result
__reader_serialize_state_v2_outputs = ["state"]
def _reader_serialize_state_v2(reader_handle, name=None):
r"""Produce a string tensor that encodes the state of a Reader.
Not all Readers support being serialized, so this can produce an
Unimplemented error.
Args:
reader_handle: A `Tensor` of type `resource`. Handle to a Reader.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`.
"""
result = _op_def_lib.apply_op("ReaderSerializeStateV2",
reader_handle=reader_handle, name=name)
return result
__restore_outputs = ["tensor"]
def _restore(file_pattern, tensor_name, dt, preferred_shard=None, name=None):
r"""Restores a tensor from checkpoint files.
Reads a tensor stored in one or several files. If there are several files (for
instance because a tensor was saved as slices), `file_pattern` may contain
wildcard symbols (`*` and `?`) in the filename portion only, not in the
directory portion.
If a `file_pattern` matches several files, `preferred_shard` can be used to hint
in which file the requested tensor is likely to be found. This op will first
open the file at index `preferred_shard` in the list of matching files and try
to restore tensors from that file. Only if some tensors or tensor slices are
not found in that first file, then the Op opens all the files. Setting
`preferred_shard` to match the value passed as the `shard` input
of a matching `Save` Op may speed up Restore. This attribute only affects
performance, not correctness. The default value -1 means files are processed in
order.
See also `RestoreSlice`.
Args:
file_pattern: A `Tensor` of type `string`.
Must have a single element. The pattern of the files from
which we read the tensor.
tensor_name: A `Tensor` of type `string`.
Must have a single element. The name of the tensor to be
restored.
dt: A `tf.DType`. The type of the tensor to be restored.
preferred_shard: An optional `int`. Defaults to `-1`.
Index of file to open first if multiple files match
`file_pattern`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `dt`. The restored tensor.
"""
result = _op_def_lib.apply_op("Restore", file_pattern=file_pattern,
tensor_name=tensor_name, dt=dt,
preferred_shard=preferred_shard, name=name)
return result
__restore_slice_outputs = ["tensor"]
def _restore_slice(file_pattern, tensor_name, shape_and_slice, dt,
preferred_shard=None, name=None):
r"""Restores a tensor from checkpoint files.
This is like `Restore` except that restored tensor can be listed as filling
only a slice of a larger tensor. `shape_and_slice` specifies the shape of the
larger tensor and the slice that the restored tensor covers.
The `shape_and_slice` input has the same format as the
elements of the `shapes_and_slices` input of the `SaveSlices` op.
Args:
file_pattern: A `Tensor` of type `string`.
Must have a single element. The pattern of the files from
which we read the tensor.
tensor_name: A `Tensor` of type `string`.
Must have a single element. The name of the tensor to be
restored.
shape_and_slice: A `Tensor` of type `string`.
Scalar. The shapes and slice specifications to use when
restoring a tensor.
dt: A `tf.DType`. The type of the tensor to be restored.
preferred_shard: An optional `int`. Defaults to `-1`.
Index of file to open first if multiple files match
`file_pattern`. See the documentation for `Restore`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `dt`. The restored tensor.
"""
result = _op_def_lib.apply_op("RestoreSlice", file_pattern=file_pattern,
tensor_name=tensor_name,
shape_and_slice=shape_and_slice, dt=dt,
preferred_shard=preferred_shard, name=name)
return result
_restore_v2_outputs = ["tensors"]
def restore_v2(prefix, tensor_names, shape_and_slices, dtypes, name=None):
r"""Restores tensors from a V2 checkpoint.
For backward compatibility with the V1 format, this Op currently allows
restoring from a V1 checkpoint as well:
- This Op first attempts to find the V2 index file pointed to by "prefix", and
if found proceed to read it as a V2 checkpoint;
- Otherwise the V1 read path is invoked.
Relying on this behavior is not recommended, as the ability to fall back to read
V1 might be deprecated and eventually removed.
By default, restores the named tensors in full. If the caller wishes to restore
specific slices of stored tensors, "shape_and_slices" should be non-empty
strings and correspondingly well-formed.
Callers must ensure all the named tensors are indeed stored in the checkpoint.
Args:
prefix: A `Tensor` of type `string`.
Must have a single element. The prefix of a V2 checkpoint.
tensor_names: A `Tensor` of type `string`.
shape {N}. The names of the tensors to be restored.
shape_and_slices: A `Tensor` of type `string`.
shape {N}. The slice specs of the tensors to be restored.
Empty strings indicate that they are non-partitioned tensors.
dtypes: A list of `tf.DTypes` that has length `>= 1`.
shape {N}. The list of expected dtype for the tensors. Must match
those stored in the checkpoint.
name: A name for the operation (optional).
Returns:
A list of `Tensor` objects of type `dtypes`.
shape {N}. The restored tensors, whose shapes are read from the
checkpoint directly.
"""
result = _op_def_lib.apply_op("RestoreV2", prefix=prefix,
tensor_names=tensor_names,
shape_and_slices=shape_and_slices,
dtypes=dtypes, name=name)
return result
__save_outputs = [""]
def _save(filename, tensor_names, data, name=None):
r"""Saves the input tensors to disk.
The size of `tensor_names` must match the number of tensors in `data`. `data[i]`
is written to `filename` with name `tensor_names[i]`.
See also `SaveSlices`.
Args:
filename: A `Tensor` of type `string`.
Must have a single element. The name of the file to which we write
the tensor.
tensor_names: A `Tensor` of type `string`.
Shape `[N]`. The names of the tensors to be saved.
data: A list of `Tensor` objects. `N` tensors to save.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("Save", filename=filename,
tensor_names=tensor_names, data=data,
name=name)
return result
__save_slices_outputs = [""]
def _save_slices(filename, tensor_names, shapes_and_slices, data, name=None):
r"""Saves input tensors slices to disk.
This is like `Save` except that tensors can be listed in the saved file as being
a slice of a larger tensor. `shapes_and_slices` specifies the shape of the
larger tensor and the slice that this tensor covers. `shapes_and_slices` must
have as many elements as `tensor_names`.
Elements of the `shapes_and_slices` input must either be:
* The empty string, in which case the corresponding tensor is
saved normally.
* A string of the form `dim0 dim1 ... dimN-1 slice-spec` where the
`dimI` are the dimensions of the larger tensor and `slice-spec`
specifies what part is covered by the tensor to save.
`slice-spec` itself is a `:`-separated list: `slice0:slice1:...:sliceN-1`
where each `sliceI` is either:
* The string `-` meaning that the slice covers all indices of this dimension
* `start,length` where `start` and `length` are integers. In that
case the slice covers `length` indices starting at `start`.
See also `Save`.
Args:
filename: A `Tensor` of type `string`.
Must have a single element. The name of the file to which we write the
tensor.
tensor_names: A `Tensor` of type `string`.
Shape `[N]`. The names of the tensors to be saved.
shapes_and_slices: A `Tensor` of type `string`.
Shape `[N]`. The shapes and slice specifications to use when
saving the tensors.
data: A list of `Tensor` objects. `N` tensors to save.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("SaveSlices", filename=filename,
tensor_names=tensor_names,
shapes_and_slices=shapes_and_slices,
data=data, name=name)
return result
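# Editor's note: a small worked example of the shapes_and_slices format
# described above; the shape and offsets are made up for illustration. For a
# 10 x 20 tensor of which rows 2..5 (start 2, length 4) and all columns are
# saved, the spec is the full shape followed by one ':'-separated slice entry
# per dimension.
def _example_shape_and_slice(full_shape=(10, 20), start=2, length=4):
    dims = " ".join(str(d) for d in full_shape)
    return "%s %d,%d:-" % (dims, start, length)  # -> "10 20 2,4:-"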
_save_v2_outputs = [""]
def save_v2(prefix, tensor_names, shape_and_slices, tensors, name=None):
r"""Saves tensors in V2 checkpoint format.
By default, saves the named tensors in full. If the caller wishes to save
specific slices of full tensors, "shape_and_slices" should be non-empty strings
and correspondingly well-formed.
Args:
prefix: A `Tensor` of type `string`.
Must have a single element. The prefix of the V2 checkpoint to which we
write the tensors.
tensor_names: A `Tensor` of type `string`.
shape {N}. The names of the tensors to be saved.
shape_and_slices: A `Tensor` of type `string`.
shape {N}. The slice specs of the tensors to be saved.
Empty strings indicate that they are non-partitioned tensors.
tensors: A list of `Tensor` objects. `N` tensors to save.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("SaveV2", prefix=prefix,
tensor_names=tensor_names,
shape_and_slices=shape_and_slices,
tensors=tensors, name=name)
return result
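# Editor's note: a hedged sketch of a SaveV2/RestoreV2 round trip using the
# wrappers above, assuming TensorFlow 1.x graph mode (tf.Session) and a
# writable '/tmp/ckpt' prefix; real user code normally goes through
# tf.train.Saver rather than calling these generated ops directly.
def _example_checkpoint_roundtrip():
    import tensorflow as tf
    value = tf.constant([1.0, 2.0, 3.0])
    # empty slice spec -> the tensor is saved / restored whole
    save_op = save_v2(prefix='/tmp/ckpt', tensor_names=['v'],
                      shape_and_slices=[''], tensors=[value])
    restored = restore_v2(prefix='/tmp/ckpt', tensor_names=['v'],
                          shape_and_slices=[''], dtypes=[tf.float32])
    with tf.Session() as sess:
        sess.run(save_op)
        return sess.run(restored)  # -> [array([1., 2., 3.], dtype=float32)]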
__sharded_filename_outputs = ["filename"]
def _sharded_filename(basename, shard, num_shards, name=None):
r"""Generate a sharded filename. The filename is printf formatted as
%s-%05d-of-%05d, basename, shard, num_shards.
Args:
basename: A `Tensor` of type `string`.
shard: A `Tensor` of type `int32`.
num_shards: A `Tensor` of type `int32`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`.
"""
result = _op_def_lib.apply_op("ShardedFilename", basename=basename,
shard=shard, num_shards=num_shards, name=name)
return result
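# Editor's note: a quick illustration of the printf pattern documented above;
# it can be reproduced in plain Python, e.g. basename 'model', shard 3 of 100:
_EXAMPLE_SHARDED_FILENAME = '%s-%05d-of-%05d' % ('model', 3, 100)
# _EXAMPLE_SHARDED_FILENAME == 'model-00003-of-00100'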
__sharded_filespec_outputs = ["filename"]
def _sharded_filespec(basename, num_shards, name=None):
r"""Generate a glob pattern matching all sharded file names.
Args:
basename: A `Tensor` of type `string`.
num_shards: A `Tensor` of type `int32`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`.
"""
result = _op_def_lib.apply_op("ShardedFilespec", basename=basename,
num_shards=num_shards, name=name)
return result
__tf_record_reader_outputs = ["reader_handle"]
def _tf_record_reader(container=None, shared_name=None, compression_type=None,
name=None):
r"""A Reader that outputs the records from a TensorFlow Records file.
Args:
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
compression_type: An optional `string`. Defaults to `""`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type mutable `string`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("TFRecordReader", container=container,
shared_name=shared_name,
compression_type=compression_type, name=name)
return result
__tf_record_reader_v2_outputs = ["reader_handle"]
def _tf_record_reader_v2(container=None, shared_name=None,
compression_type=None, name=None):
r"""A Reader that outputs the records from a TensorFlow Records file.
Args:
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
compression_type: An optional `string`. Defaults to `""`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `resource`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("TFRecordReaderV2", container=container,
shared_name=shared_name,
compression_type=compression_type, name=name)
return result
__text_line_reader_outputs = ["reader_handle"]
def _text_line_reader(skip_header_lines=None, container=None,
shared_name=None, name=None):
r"""A Reader that outputs the lines of a file delimited by '\n'.
Args:
skip_header_lines: An optional `int`. Defaults to `0`.
Number of lines to skip from the beginning of every file.
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type mutable `string`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("TextLineReader",
skip_header_lines=skip_header_lines,
container=container, shared_name=shared_name,
name=name)
return result
__text_line_reader_v2_outputs = ["reader_handle"]
def _text_line_reader_v2(skip_header_lines=None, container=None,
shared_name=None, name=None):
r"""A Reader that outputs the lines of a file delimited by '\n'.
Args:
skip_header_lines: An optional `int`. Defaults to `0`.
Number of lines to skip from the beginning of every file.
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `resource`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("TextLineReaderV2",
skip_header_lines=skip_header_lines,
container=container, shared_name=shared_name,
name=name)
return result
__whole_file_reader_outputs = ["reader_handle"]
def _whole_file_reader(container=None, shared_name=None, name=None):
r"""A Reader that outputs the entire contents of a file as a value.
To use, enqueue filenames in a Queue. The output of ReaderRead will
be a filename (key) and the contents of that file (value).
Args:
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type mutable `string`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("WholeFileReader", container=container,
shared_name=shared_name, name=name)
return result
__whole_file_reader_v2_outputs = ["reader_handle"]
def _whole_file_reader_v2(container=None, shared_name=None, name=None):
r"""A Reader that outputs the entire contents of a file as a value.
To use, enqueue filenames in a Queue. The output of ReaderRead will
be a filename (key) and the contents of that file (value).
Args:
container: An optional `string`. Defaults to `""`.
If non-empty, this reader is placed in the given container.
Otherwise, a default container is used.
shared_name: An optional `string`. Defaults to `""`.
If non-empty, this reader is named in the given bucket
with this shared_name. Otherwise, the node name is used instead.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `resource`. The handle to reference the Reader.
"""
result = _op_def_lib.apply_op("WholeFileReaderV2", container=container,
shared_name=shared_name, name=name)
return result
_write_file_outputs = [""]
def write_file(filename, contents, name=None):
r"""Writes contents to the file at input filename. Creates file if not existing.
Args:
filename: A `Tensor` of type `string`.
scalar. The name of the file to which we write the contents.
contents: A `Tensor` of type `string`.
scalar. The content to be written to the output file.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
result = _op_def_lib.apply_op("WriteFile", filename=filename,
contents=contents, name=name)
return result
def _InitOpDefLibrary():
op_list = _op_def_pb2.OpList()
_text_format.Merge(_InitOpDefLibrary.op_list_ascii, op_list)
_op_def_registry.register_op_list(op_list)
op_def_lib = _op_def_library.OpDefLibrary()
op_def_lib.add_op_list(op_list)
return op_def_lib
_InitOpDefLibrary.op_list_ascii = """op {
name: "FixedLengthRecordReader"
output_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
attr {
name: "header_bytes"
type: "int"
default_value {
i: 0
}
}
attr {
name: "record_bytes"
type: "int"
}
attr {
name: "footer_bytes"
type: "int"
default_value {
i: 0
}
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "FixedLengthRecordReaderV2"
output_arg {
name: "reader_handle"
type: DT_RESOURCE
}
attr {
name: "header_bytes"
type: "int"
default_value {
i: 0
}
}
attr {
name: "record_bytes"
type: "int"
}
attr {
name: "footer_bytes"
type: "int"
default_value {
i: 0
}
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "IdentityReader"
output_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "IdentityReaderV2"
output_arg {
name: "reader_handle"
type: DT_RESOURCE
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "MatchingFiles"
input_arg {
name: "pattern"
type: DT_STRING
}
output_arg {
name: "filenames"
type: DT_STRING
}
}
op {
name: "MergeV2Checkpoints"
input_arg {
name: "checkpoint_prefixes"
type: DT_STRING
}
input_arg {
name: "destination_prefix"
type: DT_STRING
}
attr {
name: "delete_old_dirs"
type: "bool"
default_value {
b: true
}
}
}
op {
name: "ReadFile"
input_arg {
name: "filename"
type: DT_STRING
}
output_arg {
name: "contents"
type: DT_STRING
}
}
op {
name: "ReaderNumRecordsProduced"
input_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
output_arg {
name: "records_produced"
type: DT_INT64
}
}
op {
name: "ReaderNumRecordsProducedV2"
input_arg {
name: "reader_handle"
type: DT_RESOURCE
}
output_arg {
name: "records_produced"
type: DT_INT64
}
is_stateful: true
}
op {
name: "ReaderNumWorkUnitsCompleted"
input_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
output_arg {
name: "units_completed"
type: DT_INT64
}
}
op {
name: "ReaderNumWorkUnitsCompletedV2"
input_arg {
name: "reader_handle"
type: DT_RESOURCE
}
output_arg {
name: "units_completed"
type: DT_INT64
}
is_stateful: true
}
op {
name: "ReaderRead"
input_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
input_arg {
name: "queue_handle"
type: DT_STRING
is_ref: true
}
output_arg {
name: "key"
type: DT_STRING
}
output_arg {
name: "value"
type: DT_STRING
}
}
op {
name: "ReaderReadUpTo"
input_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
input_arg {
name: "queue_handle"
type: DT_STRING
is_ref: true
}
input_arg {
name: "num_records"
type: DT_INT64
}
output_arg {
name: "keys"
type: DT_STRING
}
output_arg {
name: "values"
type: DT_STRING
}
}
op {
name: "ReaderReadUpToV2"
input_arg {
name: "reader_handle"
type: DT_RESOURCE
}
input_arg {
name: "queue_handle"
type: DT_RESOURCE
}
input_arg {
name: "num_records"
type: DT_INT64
}
output_arg {
name: "keys"
type: DT_STRING
}
output_arg {
name: "values"
type: DT_STRING
}
is_stateful: true
}
op {
name: "ReaderReadV2"
input_arg {
name: "reader_handle"
type: DT_RESOURCE
}
input_arg {
name: "queue_handle"
type: DT_RESOURCE
}
output_arg {
name: "key"
type: DT_STRING
}
output_arg {
name: "value"
type: DT_STRING
}
is_stateful: true
}
op {
name: "ReaderReset"
input_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
}
op {
name: "ReaderResetV2"
input_arg {
name: "reader_handle"
type: DT_RESOURCE
}
is_stateful: true
}
op {
name: "ReaderRestoreState"
input_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
input_arg {
name: "state"
type: DT_STRING
}
}
op {
name: "ReaderRestoreStateV2"
input_arg {
name: "reader_handle"
type: DT_RESOURCE
}
input_arg {
name: "state"
type: DT_STRING
}
is_stateful: true
}
op {
name: "ReaderSerializeState"
input_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
output_arg {
name: "state"
type: DT_STRING
}
}
op {
name: "ReaderSerializeStateV2"
input_arg {
name: "reader_handle"
type: DT_RESOURCE
}
output_arg {
name: "state"
type: DT_STRING
}
is_stateful: true
}
op {
name: "Restore"
input_arg {
name: "file_pattern"
type: DT_STRING
}
input_arg {
name: "tensor_name"
type: DT_STRING
}
output_arg {
name: "tensor"
type_attr: "dt"
}
attr {
name: "dt"
type: "type"
}
attr {
name: "preferred_shard"
type: "int"
default_value {
i: -1
}
}
}
op {
name: "RestoreSlice"
input_arg {
name: "file_pattern"
type: DT_STRING
}
input_arg {
name: "tensor_name"
type: DT_STRING
}
input_arg {
name: "shape_and_slice"
type: DT_STRING
}
output_arg {
name: "tensor"
type_attr: "dt"
}
attr {
name: "dt"
type: "type"
}
attr {
name: "preferred_shard"
type: "int"
default_value {
i: -1
}
}
}
op {
name: "RestoreV2"
input_arg {
name: "prefix"
type: DT_STRING
}
input_arg {
name: "tensor_names"
type: DT_STRING
}
input_arg {
name: "shape_and_slices"
type: DT_STRING
}
output_arg {
name: "tensors"
type_list_attr: "dtypes"
}
attr {
name: "dtypes"
type: "list(type)"
has_minimum: true
minimum: 1
}
}
op {
name: "Save"
input_arg {
name: "filename"
type: DT_STRING
}
input_arg {
name: "tensor_names"
type: DT_STRING
}
input_arg {
name: "data"
type_list_attr: "T"
}
attr {
name: "T"
type: "list(type)"
has_minimum: true
minimum: 1
}
}
op {
name: "SaveSlices"
input_arg {
name: "filename"
type: DT_STRING
}
input_arg {
name: "tensor_names"
type: DT_STRING
}
input_arg {
name: "shapes_and_slices"
type: DT_STRING
}
input_arg {
name: "data"
type_list_attr: "T"
}
attr {
name: "T"
type: "list(type)"
has_minimum: true
minimum: 1
}
}
op {
name: "SaveV2"
input_arg {
name: "prefix"
type: DT_STRING
}
input_arg {
name: "tensor_names"
type: DT_STRING
}
input_arg {
name: "shape_and_slices"
type: DT_STRING
}
input_arg {
name: "tensors"
type_list_attr: "dtypes"
}
attr {
name: "dtypes"
type: "list(type)"
has_minimum: true
minimum: 1
}
}
op {
name: "ShardedFilename"
input_arg {
name: "basename"
type: DT_STRING
}
input_arg {
name: "shard"
type: DT_INT32
}
input_arg {
name: "num_shards"
type: DT_INT32
}
output_arg {
name: "filename"
type: DT_STRING
}
}
op {
name: "ShardedFilespec"
input_arg {
name: "basename"
type: DT_STRING
}
input_arg {
name: "num_shards"
type: DT_INT32
}
output_arg {
name: "filename"
type: DT_STRING
}
}
op {
name: "TFRecordReader"
output_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
attr {
name: "compression_type"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "TFRecordReaderV2"
output_arg {
name: "reader_handle"
type: DT_RESOURCE
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
attr {
name: "compression_type"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "TextLineReader"
output_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
attr {
name: "skip_header_lines"
type: "int"
default_value {
i: 0
}
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "TextLineReaderV2"
output_arg {
name: "reader_handle"
type: DT_RESOURCE
}
attr {
name: "skip_header_lines"
type: "int"
default_value {
i: 0
}
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "WholeFileReader"
output_arg {
name: "reader_handle"
type: DT_STRING
is_ref: true
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "WholeFileReaderV2"
output_arg {
name: "reader_handle"
type: DT_RESOURCE
}
attr {
name: "container"
type: "string"
default_value {
s: ""
}
}
attr {
name: "shared_name"
type: "string"
default_value {
s: ""
}
}
is_stateful: true
}
op {
name: "WriteFile"
input_arg {
name: "filename"
type: DT_STRING
}
input_arg {
name: "contents"
type: DT_STRING
}
}
"""
_op_def_lib = _InitOpDefLibrary()
| 26.556382
| 82
| 0.652369
|
828d322e4d383d57bb84eddf5c81364b055ae194
| 8,030
|
py
|
Python
|
lib/calistra_lib/storage/json_serializer.py
|
igorxxl8/Calistra
|
ced32a53f42a8d7a2309a1eb15acef42418a3ecb
|
[
"MIT"
] | null | null | null |
lib/calistra_lib/storage/json_serializer.py
|
igorxxl8/Calistra
|
ced32a53f42a8d7a2309a1eb15acef42418a3ecb
|
[
"MIT"
] | null | null | null |
lib/calistra_lib/storage/json_serializer.py
|
igorxxl8/Calistra
|
ced32a53f42a8d7a2309a1eb15acef42418a3ecb
|
[
"MIT"
] | null | null | null |
import json
def concat(*args):
return ''.join(args)
class Serializable:
"""
This class is used as a marker for models and shows that they can be serialized
"""
def __iter__(self):
pass
def __getitem__(self):
pass
class JsonDatabase:
"""
This class describes the mechanism which parses data in json format
"""
def __init__(self, filename, cls_seq=None):
"""
Init method. Called after an instance is created
:param filename: file where the instance is stored
:param cls_seq: sequence of classes representing the nested objects
of the instance
"""
if cls_seq is None:
cls_seq = []
self.cls_seq = cls_seq
self.filename = filename
def load(self) -> list:
"""
This method parses a json object into a python object
:return: instance
"""
with open(self.filename, 'r') as file:
s = file.read()
return from_json(self.cls_seq, s)
def unload(self, instance):
"""
This method parses a python object into a string in json format
:param instance: the instance to parse
:return: None
"""
with open(self.filename, 'w') as file:
file.write(to_json(instance, indentation=4))
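# Editor's note: a minimal usage sketch with a hypothetical Task model. The
# exact meaning of cls_seq (one class per nesting level, consumed by from_json
# below) is inferred from the loader and should be treated as an assumption.
class _ExampleTask(Serializable):
    def __init__(self, name='', done=False, subtasks=None):
        self.name = name
        self.done = done
        self.subtasks = subtasks if subtasks is not None else []

def _example_database_usage(path='tasks.json'):
    db = JsonDatabase(path, cls_seq=[_ExampleTask, _ExampleTask])
    db.unload([_ExampleTask('write docs'), _ExampleTask('review', done=True)])
    return db.load()  # -> list of _ExampleTask instances rebuilt from the file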
def array_to_json(array, ctrl_char, indent, level):
"""
This method parses an array into json
:param array: the array to parse
:param ctrl_char: control character ('\n' when pretty-printing, '' otherwise)
:param indent: number of spaces per indentation level
:param level: current nesting level
:return: string in json format
"""
if not array:
return '[]'
result = ''.join(['[', ctrl_char, ' ' * level * indent])
size = len(array)
for i in range(size):
item = array[i]
if i == size - 1:
            # if the item is the last one in the array, a right square bracket
            # is appended to the result because the json string is complete
result = ''.join(
[
result,
to_json(item, indent, level+1),
ctrl_char,
' ' * (level - 1) * indent, ']'
]
)
else:
result = ''.join(
[
result,
to_json(item, indent, level+1),
',',
ctrl_char,
' ' * level * indent
]
)
return result
def to_json(instance=None, indentation=None, level=1) -> str:
"""
This method parse instance into json format
:param instance: instance for parsing
:param indentation:
:param level: level of indentation
:return: string in json format
"""
# try to parse object by default json parser
try:
res = json.dumps(instance, indent=indentation)
except TypeError:
pass
else:
return res
    # if indentation is defined, use it for parsing objects with indentation
if indentation is None:
indent = 0
ctrl_char = ''
else:
indent = indentation
ctrl_char = '\n'
# parse non-list objects
if not isinstance(instance, list):
        # get the instance attributes as a dict if possible, otherwise attrs
        # will be the instance itself
try:
attrs = vars(instance).copy()
except TypeError:
attrs = instance
        # form the beginning of the result string in json format
res = ''.join(['{', ctrl_char, ' ' * level * indent])
while attrs:
# parse every instance attr in json format
key, value = attrs.popitem()
if isinstance(value, list):
# if attr is list parse it as array
res = ''.join(
[res, '"', key, '"', ': ',
array_to_json(value, ctrl_char, indent, level + 1)]
)
elif isinstance(value, Serializable):
                # if attr is a Serializable instance, parse it recursively as an instance
res = ''.join(
[res, '"', key, '"', ': ', to_json(value, indent, level + 1)]
)
elif isinstance(value, int):
                # if attr is an int (or bool), append it to the result string
                # (bools become true/false)
res = ''.join(
[res, '"', key, '"', ': ', str(value).lower()]
)
elif value is None:
# if attr is None, append null to result
res = ''.join(
[res, '"', key, '"', ': ', 'null']
)
else:
res = ''.join(
[res, '"', key, '"', ': ', '"', str(value), '"']
)
if attrs:
                # if attrs is not empty, append a comma to the string
res = ''.join([res, ',', ctrl_char, ' ' * level * indent])
# end of parsing non list object
return ''.join([res, ctrl_char, ' ' * (level-1) * indent, '}'])
# parse list objects
return array_to_json(instance, ctrl_char, indentation, level)
def from_json(cls_seq: list, string):
"""
Parse string in json format and return instance
    :param cls_seq: sequence of classes of the objects nested inside the instance
:param string: string in json format
:return: instance
"""
def set_dict_attr(instance, data, num):
"""
This nested function set value of all instance attr
:param instance: instance to set it attrs
:param data:
:param num:
:return:
"""
for key, value in data.items():
if isinstance(value, list):
instance[key] = make_objects_array(num + 1, value)
elif isinstance(value, dict):
instance[key] = make_object(num + 1, value)
else:
instance[key] = value
def set_object_attr(instance, data, num):
"""
For object set its attrs
:param instance:
:param data:
:param num: number of class in class sequence
:return: None
"""
for key, value in data.items():
if isinstance(value, list):
                # if value is a list, set key to the result of making a list of objects
setattr(instance, key, make_objects_array(num + 1, value))
elif isinstance(value, dict):
                # if value is a dict, set key to the result of making a simple object
setattr(instance, key, make_object(num + 1, value))
else:
setattr(instance, key, value)
def make_object(num, data):
"""
This method collect object
:param num: number of class in class sequence
:param data:
:return: instance
"""
if cls_seq[num] is dict:
# if instance is dict, create a dict
instance = dict.__new__(dict)
set_dict_attr(instance, data, num)
else:
# if instance is object, create object
instance = object.__new__(cls_seq[num])
set_object_attr(instance, data, num)
return instance
def make_objects_array(num, array):
"""
This method create array of objects
:param num:
:param array:
:return: array of objects
"""
entity = []
for item in array:
if isinstance(item, list):
                # append a list of nested instance objects
entity.append(make_objects_array(num + 1, item))
elif isinstance(item, dict):
# append simple object
entity.append(make_object(num, item))
else:
entity.append(item)
return entity
    # using the standard json library, parse the string in json format into
    # a set of dicts and lists
py_objs = json.loads(string)
if isinstance(py_objs, list):
return make_objects_array(0, py_objs)
if isinstance(py_objs, int):
return py_objs
if isinstance(py_objs, str):
return py_objs
return make_object(0, py_objs)
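if __name__ == '__main__':
    # Hedged, self-contained round-trip sketch: _Demo is a throwaway class used
    # only for illustration and is not part of the library API.
    class _Demo(Serializable):
        def __init__(self):
            self.name = 'example'
            self.done = True
    dumped = to_json(_Demo(), indentation=4)
    restored = from_json([_Demo], dumped)
    print(dumped)
    print(restored.name, restored.done)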
| 30.766284
| 81
| 0.527397
|
aa13a5a56cb1e50e75472c74f8aecebb31b47985
| 2,851
|
bzl
|
Python
|
apple/internal/resource_rules/apple_resource_group.bzl
|
tnek/rules_apple
|
739aa74febeb95902dded57f7a49c85c1f153756
|
[
"Apache-2.0"
] | 313
|
2017-03-29T21:47:08.000Z
|
2022-03-29T03:09:50.000Z
|
apple/internal/resource_rules/apple_resource_group.bzl
|
tnek/rules_apple
|
739aa74febeb95902dded57f7a49c85c1f153756
|
[
"Apache-2.0"
] | 786
|
2017-03-30T16:15:59.000Z
|
2022-03-31T19:58:05.000Z
|
apple/internal/resource_rules/apple_resource_group.bzl
|
tnek/rules_apple
|
739aa74febeb95902dded57f7a49c85c1f153756
|
[
"Apache-2.0"
] | 172
|
2017-04-24T01:55:24.000Z
|
2022-03-25T19:23:31.000Z
|
# Copyright 2019 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of apple_resource_group rule."""
def _apple_resource_group_impl(ctx):
# All of the resource processing logic for this rule exists in the apple_resource_aspect.
#
# To transform the attributes referenced by this rule into resource providers, that aspect must
# be used to iterate through all relevant instances of this rule in the build graph.
return []
apple_resource_group = rule(
implementation = _apple_resource_group_impl,
attrs = {
"resources": attr.label_list(
allow_empty = True,
allow_files = True,
doc = """
Files to include in the final bundle that depends on this target. Files that are processable
resources, like .xib, .storyboard, .strings, .png, and others, will be processed by the Apple
bundling rules that have those files as dependencies. Other file types that are not processed will
be copied verbatim. These files are placed in the root of the final bundle (e.g.
Payload/foo.app/...) in most cases. However, if they appear to be localized (i.e. are contained in a
directory called *.lproj), they will be placed in a directory of the same name in the app bundle.
You can also add apple_resource_bundle and apple_bundle_import targets into `resources`, and the
resource bundle structures will be propagated into the final bundle.
""",
),
"structured_resources": attr.label_list(
allow_empty = True,
allow_files = True,
doc = """
Files to include in the final application bundle. They are not processed or compiled in any way
besides the processing done by the rules that actually generate them. These files are placed in the
bundle root in the same structure passed to this argument, so `["res/foo.png"]` will end up in
`res/foo.png` inside the bundle.
""",
),
},
doc = """
This rule encapsulates a target which provides resources to dependents. An
`apple_resource_group`'s `resources` and `structured_resources` are put in the
top-level Apple bundle target. `apple_resource_group` targets need to be added
to library targets through the `data` attribute, or to other
`apple_resource_bundle` or `apple_resource_group` targets through the
`resources` attribute.
""",
)
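# A hedged BUILD-file sketch of how this rule might be wired up; the target
# names ("AppResources", "AppLib") and the globs below are hypothetical:
#
#     apple_resource_group(
#         name = "AppResources",
#         resources = glob(["Resources/**"]),
#         structured_resources = glob(["Raw/**"]),
#     )
#
#     objc_library(
#         name = "AppLib",
#         srcs = ["Lib.m"],
#         data = [":AppResources"],
#     )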
| 45.983871
| 100
| 0.737987
|
b2b027aa8d29e2d5cacaebdcdabfd726301e1e75
| 246
|
py
|
Python
|
pos/manage.py
|
NonnEmilia/OpenGenfri
|
7061957fb13ef824763922e1891cb72f7d51bb0f
|
[
"MIT"
] | null | null | null |
pos/manage.py
|
NonnEmilia/OpenGenfri
|
7061957fb13ef824763922e1891cb72f7d51bb0f
|
[
"MIT"
] | null | null | null |
pos/manage.py
|
NonnEmilia/OpenGenfri
|
7061957fb13ef824763922e1891cb72f7d51bb0f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pos.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| 22.363636
| 67
| 0.768293
|
a9d4a04f0330b980bf19773fe5490c5b61054e05
| 4,770
|
py
|
Python
|
learned_optimization/baselines/normalizers.py
|
google/learned_optimization
|
1c9ee0159c97815fc6afe79a76224fb28b199053
|
[
"Apache-2.0"
] | 70
|
2021-12-16T07:12:11.000Z
|
2022-03-31T19:13:36.000Z
|
learned_optimization/baselines/normalizers.py
|
google/learned_optimization
|
1c9ee0159c97815fc6afe79a76224fb28b199053
|
[
"Apache-2.0"
] | 10
|
2021-12-29T10:03:37.000Z
|
2022-03-22T15:59:55.000Z
|
learned_optimization/baselines/normalizers.py
|
google/learned_optimization
|
1c9ee0159c97815fc6afe79a76224fb28b199053
|
[
"Apache-2.0"
] | 5
|
2021-12-16T04:52:35.000Z
|
2022-03-22T03:45:31.000Z
|
# coding=utf-8
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Normalization across different tasks.
The losses of different tasks can vary a huge amount. This is problematic when
trying to compare performance across a large mixture of tasks.
To remedy this, we can "normalize" each different task. These normalizations
are often built by running hand designed optimizers, and rescaling based on
these.
"""
from concurrent import futures
import functools
import json
import os
from typing import Any, Callable, Mapping, Sequence
import chex
import jax.numpy as jnp
from learned_optimization import filesystem
from learned_optimization import jax_utils
from learned_optimization.baselines import utils
import numpy as onp
import tqdm
NormData = Any
NormFN = Callable[[jnp.ndarray], jnp.ndarray]
def ema(data: chex.Array, alpha: float, ignore_nan=False):
"""Exponential moving average."""
# TODO(lmetz) dedup with notebook_utils!
if len(data) == 0: # pylint: disable=g-explicit-length-test
return data
data = onp.asarray(data)
x = onp.zeros_like(data)
x[0] = data[0]
m_alpha = alpha
# TODO(lmetz) profile if this is needed / saves much time.
if ignore_nan:
for i, a in enumerate((1 - alpha) * data[1:]):
x[i + 1] = x[i] if onp.isnan(a) else x[i] * m_alpha + a
else:
for i, a in enumerate((1 - alpha) * data[1:]):
x[i + 1] = x[i] * m_alpha + a
return x
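# The recurrence implemented above is x[0] = data[0] and
# x[i + 1] = alpha * x[i] + (1 - alpha) * data[i + 1], so an alpha close to 1
# smooths the incoming curve more heavily (with ignore_nan, NaN samples simply
# carry the previous value forward).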
def threaded_tqdm_map(threads: int, func: Callable[[Any], Any],
data: Sequence[Any]) -> Sequence[Any]:
# TODO(lmetz) dedup with notebook_utils!
future_list = []
with futures.ThreadPoolExecutor(threads) as executor:
for l in tqdm.tqdm(data):
future_list.append(executor.submit(func, l))
return [x.result() for x in tqdm.tqdm(future_list)]
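# For reference, threaded_tqdm_map(8, load_fn, paths) applies load_fn to every
# element of paths on 8 worker threads and returns the results in input order
# (load_fn and paths are placeholder names).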
def _speedup_over_adam_build(task_name: str) -> NormData:
"""Construct data needed for normalization function."""
big_adam = utils.load_archive(task_name, "AdamLR_100000_R5")
emaed_curves = [
ema(c, 0.95) for c in onp.mean(big_adam["eval/train/loss"], axis=1)
]
xs = big_adam["eval/xs"][0][0]
bottom_env = onp.nanmin(emaed_curves, axis=0)
num_pieces = 512
xp = onp.linspace(0, xs[-1], num_pieces)
yp = onp.interp(xp, xs, bottom_env)
yp = onp.minimum.accumulate(yp)
return (xp.tolist(), yp.tolist())
def _speedup_over_adam_make_func(norm_data: NormData) -> NormFN:
"""Build the function that does the actual normalization.
Args:
norm_data: data created from `_speedup_over_adam_build`.
Returns:
    Function which normalizes the given inputs.
"""
xp, yp = norm_data
xp = onp.asarray(xp)[::-1]
yp = onp.asarray(yp)[::-1]
def fn(x):
ret = jax_utils.cached_jit(jnp.interp)(x, yp, xp)
return jnp.where(jnp.isfinite(ret), ret, 0.0)
return fn
def _one_line_dumps(dd):
content = "{\n"
lines = []
for l, n in sorted(dd.items(), key=lambda x: x[0]):
lines.append("\"%s\":%s" % (l, json.dumps(n)))
content += ",\n".join(lines)
content += "\n}"
return content
def speedup_over_adam_build_and_write(tasks: Sequence[str],
output_path: str,
overwrite: bool = False):
"""Build and append the normalization data for the provided set of tasks."""
flat_norm_datas = threaded_tqdm_map(32, _speedup_over_adam_build, tasks)
if filesystem.exists(output_path):
with filesystem.file_open(output_path, "r") as f:
data_dict = json.loads(f.read())
else:
data_dict = {}
for d, t in zip(flat_norm_datas, tasks):
if t not in data_dict or overwrite:
data_dict[t] = d
else:
raise ValueError(f"Duplicate found for {t}")
content = _one_line_dumps(data_dict)
with filesystem.file_open(output_path, "w") as f:
f.write(content)
return content
@functools.lru_cache(None)
def speedup_over_adam_normalizer_map() -> Mapping[str, NormFN]:
"""Load the precomputed dictionary mapping from task name to a norm func."""
path = os.path.join(
os.path.dirname(__file__), "data", "speedup_over_adam.json")
with filesystem.file_open(path, "r") as f:
data_dict = json.loads(f.read())
return {k: _speedup_over_adam_make_func(d) for k, d in data_dict.items()}
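# Hedged usage sketch; the task name "ImageMLP" is hypothetical and the real keys
# come from the bundled speedup_over_adam.json file:
#     norm_map = speedup_over_adam_normalizer_map()
#     norm_fn = norm_map["ImageMLP"]
#     # maps raw losses to (roughly) the number of tuned-Adam steps needed to
#     # reach that loss; non-finite interpolations are clipped to 0.
#     speedups = norm_fn(jnp.asarray(loss_curve))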
| 30.189873
| 78
| 0.692453
|
c6b6f6bc872ac993a5831509002ce3e6c0fd552b
| 9,438
|
py
|
Python
|
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/cisco/ios/tests/unit/modules/network/ios/test_ios_bgp_global.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/cisco/ios/tests/unit/modules/network/ios/test_ios_bgp_global.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/cisco/ios/tests/unit/modules/network/ios/test_ios_bgp_global.py
|
Stienvdh/statrick
|
7b092fc42171e226718a70a285a4b323f2f395ad
|
[
"MIT"
] | null | null | null |
#
# (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.cisco.ios.tests.unit.compat.mock import patch
from ansible_collections.cisco.ios.plugins.modules import ios_bgp_global
from ansible_collections.cisco.ios.tests.unit.modules.utils import (
set_module_args,
)
from .ios_module import TestIosModule, load_fixture
class TestIosBgpGlobalModule(TestIosModule):
module = ios_bgp_global
def setUp(self):
super(TestIosBgpGlobalModule, self).setUp()
self.mock_get_config = patch(
"ansible_collections.ansible.netcommon.plugins.module_utils.network.common.network.Config.get_config"
)
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch(
"ansible_collections.ansible.netcommon.plugins.module_utils.network.common.network.Config.load_config"
)
self.load_config = self.mock_load_config.start()
self.mock_get_resource_connection_config = patch(
"ansible_collections.ansible.netcommon.plugins.module_utils.network.common.cfg.base."
"get_resource_connection"
)
self.get_resource_connection_config = (
self.mock_get_resource_connection_config.start()
)
self.mock_get_resource_connection_facts = patch(
"ansible_collections.ansible.netcommon.plugins.module_utils.network.common.resource_module."
"get_resource_connection"
)
self.get_resource_connection_facts = (
self.mock_get_resource_connection_facts.start()
)
self.mock_edit_config = patch(
"ansible_collections.cisco.ios.plugins.module_utils.network.ios.providers.providers.CliProvider.edit_config"
)
self.edit_config = self.mock_edit_config.start()
self.mock_execute_show_command = patch(
"ansible_collections.cisco.ios.plugins.module_utils.network.ios.facts.bgp_global.bgp_global."
"Bgp_globalFacts.get_bgp_global_data"
)
self.execute_show_command = self.mock_execute_show_command.start()
def tearDown(self):
super(TestIosBgpGlobalModule, self).tearDown()
self.mock_get_resource_connection_config.stop()
self.mock_get_resource_connection_facts.stop()
self.mock_edit_config.stop()
self.mock_get_config.stop()
self.mock_load_config.stop()
self.mock_execute_show_command.stop()
def load_fixtures(self, commands=None):
def load_from_file(*args, **kwargs):
return load_fixture("ios_bgp_global.cfg")
self.execute_show_command.side_effect = load_from_file
def test_ios_bgp_global_merged(self):
set_module_args(
dict(
config=dict(
as_number="65000",
bgp=dict(
dampening=dict(
penalty_half_time=1,
reuse_route_val=1,
suppress_route_val=1,
max_suppress=1,
),
graceful_shutdown=dict(
neighbors=dict(time=50),
community=100,
local_preference=100,
),
),
neighbor=[
dict(
address="198.51.100.1",
description="merge neighbor",
aigp=dict(
send=dict(
cost_community=dict(
id=100,
poi=dict(
igp_cost=True, transitive=True
),
)
)
),
)
],
),
state="merged",
)
)
commands = [
"router bgp 65000",
"bgp graceful-shutdown all neighbors 50 local-preference 100 community 100",
"bgp dampening 1 1 1 1",
"neighbor 198.51.100.1 aigp send cost-community 100 poi igp-cost transitive",
"neighbor 198.51.100.1 description merge neighbor",
]
result = self.execute_module(changed=True)
self.assertEqual(sorted(result["commands"]), sorted(commands))
def test_ios_bgp_global_merged_idempotent(self):
set_module_args(
dict(
config=dict(
as_number="65000",
bgp=dict(
advertise_best_external=True,
bestpath=[dict(compare_routerid=True)],
nopeerup_delay=[dict(post_boot=10)],
),
redistribute=[dict(connected=dict(metric=10))],
neighbor=[
dict(
address="198.51.100.1",
remote_as=100,
route_map=dict(name="test-route", out=True),
)
],
timers=dict(keepalive=100, holdtime=200, min_holdtime=150),
),
state="merged",
)
)
self.execute_module(changed=False, commands=[])
def test_ios_bgp_global_replaced(self):
set_module_args(
dict(
config=dict(
as_number="65000",
bgp=dict(
advertise_best_external=True,
bestpath=[dict(compare_routerid=True)],
log_neighbor_changes=True,
nopeerup_delay=[
dict(cold_boot=20),
dict(post_boot=10),
],
),
redistribute=[dict(connected=dict(metric=10))],
neighbor=[
dict(
address="192.0.2.1",
remote_as=200,
description="replace neighbor",
)
],
),
state="replaced",
)
)
commands = [
"bgp nopeerup-delay cold-boot 20",
"neighbor 192.0.2.1 description replace neighbor",
"neighbor 192.0.2.1 remote-as 200",
"no neighbor 198.51.100.1 remote-as 100",
"no neighbor 198.51.100.1 route-map test-route out",
"no timers bgp 100 200 150",
"router bgp 65000",
]
result = self.execute_module(changed=True)
self.assertEqual(sorted(result["commands"]), sorted(commands))
def test_ios_bgp_global_replaced_idempotent(self):
set_module_args(
dict(
config=dict(
as_number="65000",
bgp=dict(
advertise_best_external=True,
bestpath=[dict(compare_routerid=True)],
nopeerup_delay=[dict(post_boot=10)],
),
redistribute=[dict(connected=dict(metric=10))],
neighbor=[
dict(
address="198.51.100.1",
remote_as=100,
route_map=dict(name="test-route", out=True),
)
],
timers=dict(keepalive=100, holdtime=200, min_holdtime=150),
),
state="replaced",
)
)
self.execute_module(changed=False, commands=[])
def test_ios_bgp_global_deleted(self):
set_module_args(dict(config=dict(as_number=65000), state="deleted"))
commands = [
"router bgp 65000",
"no bgp nopeerup-delay post-boot 10",
"no bgp bestpath compare-routerid",
"no bgp advertise-best-external",
"no timers bgp 100 200 150",
"no redistribute connected metric 10",
"no neighbor 198.51.100.1 remote-as 100",
"no neighbor 198.51.100.1 route-map test-route out",
]
result = self.execute_module(changed=True)
self.assertEqual(sorted(result["commands"]), sorted(commands))
def test_ios_bgp_global_purged(self):
set_module_args(dict(config=dict(as_number=65000), state="purged"))
commands = ["no router bgp 65000"]
self.execute_module(changed=True, commands=commands)
def test_ios_bgp_global_parsed(self):
set_module_args(
dict(
running_config="router bgp 65000\n bgp nopeerup-delay post-boot 10",
state="parsed",
)
)
result = self.execute_module(changed=False)
parsed_list = {
"as_number": "65000",
"bgp": {"nopeerup_delay": [{"post_boot": 10}]},
}
self.assertEqual(parsed_list, result["parsed"])
| 38.522449
| 120
| 0.515787
|
5c0be0518026c4b26be526f8fed5656ab73446d0
| 3,133
|
py
|
Python
|
tests/functions_tests/test_mellowmax.py
|
cnheider/chainerrl
|
018a29132d77e5af0f92161250c72aba10c6ce29
|
[
"MIT"
] | 923
|
2017-06-01T08:27:42.000Z
|
2022-03-24T02:17:04.000Z
|
tests/functions_tests/test_mellowmax.py
|
hardmaru/chainerrl
|
018a29132d77e5af0f92161250c72aba10c6ce29
|
[
"MIT"
] | 374
|
2017-06-02T02:07:50.000Z
|
2021-06-29T22:05:38.000Z
|
tests/functions_tests/test_mellowmax.py
|
hardmaru/chainerrl
|
018a29132d77e5af0f92161250c72aba10c6ce29
|
[
"MIT"
] | 253
|
2017-06-04T10:31:50.000Z
|
2022-03-19T15:20:51.000Z
|
import unittest
from chainer import cuda
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
import numpy
from chainerrl.functions.mellowmax import maximum_entropy_mellowmax
from chainerrl.functions.mellowmax import mellowmax
@testing.parameterize(*testing.product({
'shape': [(1, 1), (2, 3), (2, 3, 4), (2, 3, 4, 5)],
'dtype': [numpy.float32],
'omega': [10, 5, 1, -1, -5, -10],
'axis': [0, 1, -1, -2],
'same_value': [True, False],
}))
class TestMellowmax(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
if self.same_value:
self.x[:] = numpy.random.uniform(-1, 1, 1).astype(self.dtype)
def check_forward(self, x_data):
xp = cuda.get_array_module(x_data)
y = mellowmax(x_data, axis=self.axis, omega=self.omega)
self.assertEqual(y.array.dtype, self.dtype)
x_min = xp.min(x_data, axis=self.axis)
x_max = xp.max(x_data, axis=self.axis)
x_mean = xp.mean(x_data, axis=self.axis)
print('x_min', x_min)
print('y.array', y.array)
# min <= mellowmax <= max
eps = 1e-5
self.assertTrue(xp.all(x_min <= y.array + eps))
self.assertTrue(xp.all(x_max >= y.array - eps))
# omega > 0 -> mellowmax is more like max
if self.omega > 0:
self.assertTrue(xp.all(x_mean <= y.array + eps))
# omega < 0 -> mellowmax is more like min
if self.omega < 0:
self.assertTrue(xp.all(x_mean >= y.array - eps))
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x))
@testing.parameterize(*testing.product({
'shape': [(1, 1), (2, 3), (2, 3, 4), (2, 3, 4, 5)],
'dtype': [numpy.float32],
'omega': [10, 5, 1, 0, -1, -5, -10],
'same_value': [True, False],
}))
class TestMaximumEntropyMellowmax(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, self.shape).astype(self.dtype)
if self.same_value:
self.x[:] = numpy.random.uniform(-1, 1, 1).astype(self.dtype)
def check_forward(self, x_data):
xp = cuda.get_array_module(x_data)
y = maximum_entropy_mellowmax(x_data)
self.assertEqual(y.array.dtype, self.dtype)
print('y', y.array)
# Outputs must be positive
xp.testing.assert_array_less(xp.zeros_like(y.array), y.array)
# Sums must be ones
sums = xp.sum(y.array, axis=1)
testing.assert_allclose(sums, xp.ones_like(sums))
        # Expectations must be equal to mellowmax's outputs
testing.assert_allclose(
xp.sum(y.array * x_data, axis=1), mellowmax(x_data, axis=1).array)
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
| 30.715686
| 78
| 0.620492
|
d2921b5febc711ddc8cc22ac82a74f680b14734d
| 13,486
|
py
|
Python
|
safeopt/utilities.py
|
vishalbelsare/SafeOpt
|
06f4579779af17d6a28199ab69c379f0d18bc5f4
|
[
"MIT"
] | 107
|
2015-09-04T15:12:14.000Z
|
2022-03-18T09:03:55.000Z
|
gosafeopt/utilities.py
|
Data-Science-in-Mechanical-Engineering/Contextual-GoSafe
|
c7d05a866cec0993609838ac2c5adc14ac3754ab
|
[
"MIT"
] | 5
|
2019-12-25T02:48:51.000Z
|
2021-11-26T10:52:44.000Z
|
gosafeopt/utilities.py
|
Data-Science-in-Mechanical-Engineering/Contextual-GoSafe
|
c7d05a866cec0993609838ac2c5adc14ac3754ab
|
[
"MIT"
] | 48
|
2015-09-04T00:08:01.000Z
|
2022-01-24T14:15:23.000Z
|
"""
Utilities for the safeopt library (e.g., sampling).
Author: Felix Berkenkamp (befelix at inf dot ethz dot ch)
"""
from __future__ import print_function, absolute_import, division
from collections import Sequence # isinstance(...,Sequence)
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # Create 3D axes
from matplotlib import cm # 3D plot colors
__all__ = ['linearly_spaced_combinations', 'sample_gp_function', 'plot_2d_gp',
'plot_3d_gp', 'plot_contour_gp']
def linearly_spaced_combinations(bounds, num_samples):
"""
Return 2-D array with all linearly spaced combinations with the bounds.
Parameters
----------
bounds: sequence of tuples
The bounds for the variables, [(x1_min, x1_max), (x2_min, x2_max), ...]
    num_samples: integer or array_like
Number of samples to use for every dimension. Can be a constant if
the same number should be used for all, or an array to fine-tune
precision. Total number of data points is num_samples ** len(bounds).
Returns
-------
combinations: 2-d array
        A 2-d array. If d = len(bounds) and l = prod(num_samples) then it
is of size l x d, that is, every row contains one combination of
inputs.
"""
num_vars = len(bounds)
if not isinstance(num_samples, Sequence):
num_samples = [num_samples] * num_vars
if len(bounds) == 1:
return np.linspace(bounds[0][0], bounds[0][1], num_samples[0])[:, None]
# Create linearly spaced test inputs
inputs = [np.linspace(b[0], b[1], n) for b, n in zip(bounds,
num_samples)]
# Convert to 2-D array
return np.array([x.ravel() for x in np.meshgrid(*inputs)]).T
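# Illustrative call (values chosen arbitrarily): with bounds [(0., 1.), (-1., 1.)]
# and num_samples=3 the returned array has shape (9, 2), one row per combination
# of the 3 linearly spaced values in each dimension:
#     linearly_spaced_combinations([(0., 1.), (-1., 1.)], 3).shape == (9, 2)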
def sample_gp_function(kernel, bounds, noise_var, num_samples,
interpolation='kernel', mean_function=None):
"""
Sample a function from a gp with corresponding kernel within its bounds.
Parameters
----------
kernel: instance of GPy.kern.*
bounds: list of tuples
[(x1_min, x1_max), (x2_min, x2_max), ...]
noise_var: float
Variance of the observation noise of the GP function
num_samples: int or list
        If an integer, draws the corresponding number of samples in all
        dimensions and tests all possible input combinations. If a list, then
the list entries correspond to the number of linearly spaced samples of
the corresponding input
interpolation: string
If 'linear' interpolate linearly between samples, if 'kernel' use the
corresponding mean RKHS-function of the GP.
mean_function: callable
Mean of the sample function
Returns
-------
function: object
function(x, noise=True)
A function that takes as inputs new locations x to be evaluated and
returns the corresponding noisy function values. If noise=False is
set the true function values are returned (useful for plotting).
"""
inputs = linearly_spaced_combinations(bounds, num_samples)
cov = kernel.K(inputs) + np.eye(inputs.shape[0]) * 1e-6
output = np.random.multivariate_normal(np.zeros(inputs.shape[0]),
cov)
if interpolation == 'linear':
def evaluate_gp_function_linear(x, noise=True):
"""
Evaluate the GP sample function with linear interpolation.
Parameters
----------
x: np.array
2D array with inputs
noise: bool
Whether to include prediction noise
"""
x = np.atleast_2d(x)
y = sp.interpolate.griddata(inputs, output, x, method='linear')
# Work around weird dimension squishing in griddata
y = np.atleast_2d(y.squeeze()).T
if mean_function is not None:
y += mean_function(x)
if noise:
y += np.sqrt(noise_var) * np.random.randn(x.shape[0], 1)
return y
return evaluate_gp_function_linear
elif interpolation == 'kernel':
cho_factor = sp.linalg.cho_factor(cov)
alpha = sp.linalg.cho_solve(cho_factor, output)
def evaluate_gp_function_kernel(x, noise=True):
"""
Evaluate the GP sample function with kernel interpolation.
Parameters
----------
x: np.array
2D array with inputs
noise: bool
Whether to include prediction noise
"""
x = np.atleast_2d(x)
y = kernel.K(x, inputs).dot(alpha)
y = y[:, None]
if mean_function is not None:
y += mean_function(x)
if noise:
y += np.sqrt(noise_var) * np.random.randn(x.shape[0], 1)
return y
return evaluate_gp_function_kernel
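# Hedged usage sketch (requires GPy; the kernel choice and noise level below are
# arbitrary examples, not defaults of this module):
#     import GPy
#     kernel = GPy.kern.RBF(input_dim=1, lengthscale=0.2)
#     f = sample_gp_function(kernel, bounds=[(-1., 1.)], noise_var=0.01,
#                            num_samples=100)
#     y_noisy = f(np.array([[0.5]]))              # noisy evaluation
#     y_true = f(np.array([[0.5]]), noise=False)  # noise-free value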
def plot_2d_gp(gp, inputs, predictions=None, figure=None, axis=None,
fixed_inputs=None, beta=3, fmin=None, **kwargs):
"""
Plot a 2D GP with uncertainty.
Parameters
----------
gp: Instance of GPy.models.GPRegression
inputs: 2darray
The input parameters at which the GP is to be evaluated
predictions: ndarray
Can be used to manually pass the GP predictions, set to None to
use the gp directly. Is of the form (mean, variance)
figure: matplotlib figure
        The figure on which to draw (ignored if axis is provided)
axis: matplotlib axis
The axis on which to draw
fixed_inputs: list
        A list containing the fixed inputs and their corresponding
values, e.g., [(0, 3.2), (4, -2.43)]. Set the value to None if
it's not fixed, but should not be a plotted axis either
beta: float
The confidence interval used
fmin : float
The safety threshold value.
Returns
-------
axis
"""
if fixed_inputs is None:
if gp.kern.input_dim > 1:
raise NotImplementedError('This only works for 1D inputs')
fixed_inputs = []
elif gp.kern.input_dim - len(fixed_inputs) != 1:
        raise NotImplementedError('This only works for 1D inputs')
ms = kwargs.pop('ms', 10)
mew = kwargs.pop('mew', 3)
point_color = kwargs.pop('point_color', 'k')
if axis is None:
if figure is None:
figure = plt.figure()
axis = figure.gca()
else:
axis = figure.gca()
# Get a list of unfixed inputs to plot
unfixed = list(range(gp.kern.input_dim))
for dim, val in fixed_inputs:
if val is not None:
inputs[:, dim] = val
unfixed.remove(dim)
# Compute GP predictions if not provided
if predictions is None:
mean, var = gp._raw_predict(inputs)
else:
mean, var = predictions
output = mean.squeeze()
std_dev = beta * np.sqrt(var.squeeze())
axis.fill_between(inputs[:, unfixed[0]],
output - std_dev,
output + std_dev,
facecolor='blue',
alpha=0.3)
axis.plot(inputs[:, unfixed[0]], output, **kwargs)
axis.scatter(gp.X[:-1, unfixed[0]], gp.Y[:-1, 0], s=20 * ms,
marker='x', linewidths=mew, color=point_color)
axis.scatter(gp.X[-1, unfixed[0]], gp.Y[-1, 0], s=20 * ms,
marker='x', linewidths=mew, color='r')
axis.set_xlim([np.min(inputs[:, unfixed[0]]),
np.max(inputs[:, unfixed[0]])])
if fmin is not None:
axis.plot(inputs[[0, -1], unfixed[0]], [fmin, fmin], 'k--')
return axis
def plot_3d_gp(gp, inputs, predictions=None, figure=None, axis=None,
fixed_inputs=None, beta=3, **kwargs):
"""
Plot a 3D gp with uncertainty.
Parameters
----------
gp: Instance of GPy.models.GPRegression
inputs: 2darray
The input parameters at which the GP is to be evaluated
predictions: ndarray
Can be used to manually pass the GP predictions, set to None to
use the gp directly. Is of the form [mean, variance]
figure: matplotlib figure
        The figure on which to draw (ignored if axis is provided)
axis: matplotlib axis
The axis on which to draw
fixed_inputs: list
        A list containing the fixed inputs and their corresponding
values, e.g., [(0, 3.2), (4, -2.43)]. Set the value to None if
it's not fixed, but should not be a plotted axis either
beta: float
The confidence interval used
Returns
-------
surface: matplotlib trisurf plot
data: matplotlib plot for data points
"""
if fixed_inputs is None:
if gp.kern.input_dim > 2:
raise NotImplementedError('This only works for 2D inputs')
fixed_inputs = []
elif gp.kern.input_dim - len(fixed_inputs) != 2:
        raise NotImplementedError('Only two inputs can be unfixed')
if axis is None:
if figure is None:
figure = plt.figure()
axis = Axes3D(figure)
else:
axis = Axes3D(figure)
# Get a list of unfixed inputs to plot
unfixed = list(range(gp.kern.input_dim))
for dim, val in fixed_inputs:
if val is not None:
inputs[:, dim] = val
unfixed.remove(dim)
# Compute GP predictions if not provided
if predictions is None:
mean, var = gp._raw_predict(inputs)
else:
mean, var = predictions
surf = axis.plot_trisurf(inputs[:, unfixed[0]],
inputs[:, unfixed[1]],
mean[:, 0],
cmap=cm.jet, linewidth=0.2, alpha=0.5)
data = axis.plot(gp.X[:-1, unfixed[0]],
gp.X[:-1, unfixed[1]],
gp.Y[:-1, 0],
'o')
axis.plot(gp.X[-1, unfixed[0]],
gp.X[-1, unfixed[1]],
gp.Y[-1, 0],
'ro')
axis.set_xlim([np.min(inputs[:, unfixed[0]]),
np.max(inputs[:, unfixed[0]])])
axis.set_ylim([np.min(inputs[:, unfixed[1]]),
np.max(inputs[:, unfixed[1]])])
return surf, data
def plot_contour_gp(gp, inputs, predictions=None, figure=None, axis=None,
colorbar=True, **kwargs):
"""
    Plot a 2D contour plot of a gp.
Parameters
----------
gp: Instance of GPy.models.GPRegression
inputs: list of arrays/floats
The input parameters at which the GP is to be evaluated,
here instead of the combinations of inputs the individual inputs
that are spread in a grid are given. Only two of the arrays
should have more than one value (not fixed).
predictions: ndarray
Can be used to manually pass the GP predictions, set to None to
use the gp directly.
figure: matplotlib figure
        The figure on which to draw (ignored if axis is provided)
axis: matplotlib axis
The axis on which to draw
Returns
-------
contour: matplotlib contour plot
colorbar: matplotlib colorbar
points: matplotlib plot
"""
if axis is None:
if figure is None:
figure = plt.figure()
axis = figure.gca()
else:
axis = figure.gca()
# Find which inputs are fixed to constant values
slices = []
lengths = []
for i, inp in enumerate(inputs):
if isinstance(inp, np.ndarray):
slices.append(i)
lengths.append(inp.shape[0])
mesh = np.meshgrid(*inputs, indexing='ij')
if predictions is None:
# Convert to array with combinations of inputs
gp_inputs = np.array([x.ravel() for x in mesh]).T
mean = gp._raw_predict(gp_inputs)[0]
else:
mean = predictions[0]
c_bar = None
if not np.all(mean == mean[0]):
# Need to squeeze the added dimensions caused by fixed inputs
c = axis.contour(mesh[slices[0]].squeeze(),
mesh[slices[1]].squeeze(),
mean.squeeze().reshape(*lengths),
20,
**kwargs)
if colorbar:
c_bar = plt.colorbar(c)
else:
c = None
data = axis.plot(gp.X[:-1, slices[0]], gp.X[:-1, slices[1]], 'ob')
axis.plot(gp.X[-1, slices[0]], gp.X[-1, slices[1]], 'or')
axis.set_xlim([np.min(inputs[slices[0]]),
np.max(inputs[slices[0]])])
axis.set_ylim([np.min(inputs[slices[1]]),
np.max(inputs[slices[1]])])
return c, c_bar, data
| 35.303665
| 79
| 0.548643
|
f77edd20e09dfb45eb941f5e5a818505aeade4ab
| 7,341
|
py
|
Python
|
scripts/build-doc.py
|
MosHumanoid/bitbots_thmos_meta
|
f45ccc362dc689b69027be5b0d000d2a08580de4
|
[
"MIT"
] | 16
|
2018-12-14T16:12:40.000Z
|
2022-03-26T06:59:33.000Z
|
scripts/build-doc.py
|
MosHumanoid/bitbots_thmos_meta
|
f45ccc362dc689b69027be5b0d000d2a08580de4
|
[
"MIT"
] | 37
|
2019-01-05T12:01:34.000Z
|
2021-11-03T11:47:32.000Z
|
scripts/build-doc.py
|
MosHumanoid/bitbots_thmos_meta
|
f45ccc362dc689b69027be5b0d000d2a08580de4
|
[
"MIT"
] | 7
|
2019-07-28T08:32:13.000Z
|
2022-03-13T06:32:20.000Z
|
#!/usr/bin/env python3
import sys
import os
import argparse
import subprocess
from rospkg import RosPack
EXCLUDED_PACKAGES = [
    'wolves_image_provider',  # not our package and we will deprecate it soon anyways
'bitbots_animation_server', # startup on import
'bitbots_dynamixel_debug', # startup on import
]
INCOMPATIBLE_PACKAGES = [
    'humanoid_league_interactive_marker',  # doesn't work properly TODO fix
'humanoid_league_rviz_marker', # not currently python2 compatible
'udp_bridge', # not currently python2 compatible
'bitbots_imageloader', # not currently python2 compatible
'bitbots_vision', # problems with VisionExtensions and numpy imports
'bitbots_ros_control', # some \ref commands dont resolve
]
class Colors:
BLACK = "\033[0;30m"
RED = "\033[0;31m"
GREEN = "\033[0;32m"
ORANGE = "\033[0;33m"
BLUE = "\033[0;34m"
PURPLE = "\033[0;35m"
CYAN = "\033[0;36m"
LIGHT_GRAY = "\033[0;37m"
DARK_GRAY = "\033[1;30m"
LIGHT_RED = "\033[1;31m"
LIGHT_GREEN = "\033[1;32m"
YELLOW = "\033[1;33m"
LIGHT_BLUE = "\033[1;34m"
LIGHT_PURPLE = "\033[1;35m"
LIGHT_CYAN = "\033[1;36m"
WHITE = "\033[1;37m"
NO_COLOR = "\033[0m"
def log_error(message):
print("{}[!]{} {}".format(Colors.RED, Colors.NO_COLOR, message.replace("\n", "\n ")),
file=sys.stderr)
def log_info(message):
print("{}[i]{} {}".format(Colors.BLUE, Colors.NO_COLOR, message.replace("\n", "\n ")))
def log_warn(message):
print("{}[w]{} {}".format(Colors.YELLOW, Colors.NO_COLOR, message.replace("\n", "\n ")))
def parse_args():
parser = argparse.ArgumentParser(
description='A utility script to build and merge bitbots documentation'
)
mode = parser.add_mutually_exclusive_group()
mode.add_argument('-m', '--meta',
action='store_const', const=True, default=False,
help='Build manual documentation from bitbots_meta only'
)
mode.add_argument('-p', '--package',
help='Build documentation for single package only')
mode.add_argument('-i', '--internal',
action='store_const', const=True, default=False,
help='Build internal documentation from "doc_internal" only')
parser.add_argument('-v',
action='count', dest='verbosity', default=0,
help='Be more verbose. Can be given multiple times')
parser.add_argument('-e', '--exclude',
action='append', dest='excludes', default=EXCLUDED_PACKAGES,
                        help='Exclude a package from documentation generation')
return parser.parse_args()
def handle_process_output(args, process):
if process.returncode != 0:
log_error("Error calling {}".format(process.args[0]))
if process.stderr:
print(process.stderr)
if args.verbosity >= 1 and process.stdout:
print(process.stdout)
else:
if args.verbosity >= 1:
if process.stderr:
print(process.stderr)
if args.verbosity >= 2 and process.stdout:
print(process.stdout)
else:
if process.stderr:
log_warn("{} printed to stderr. Supply -v to see".format(process.args[0]))
def filter_packages_for_bitbots(rospack, args):
return [pkg_name
for pkg_name in rospack.list()
if "bitbots" in rospack.get_path(pkg_name)
and os.path.join("bitbots_meta", "lib") not in rospack.get_path(pkg_name)
and pkg_name not in args.excludes
]
def build_package_doc(rospack, pkg_name, args):
    if pkg_name not in rospack.list():
log_error("Package {} is not in $ROS_PACKAGE_PATH".format(pkg_name))
return
log_info("Building documentation for package {}".format(pkg_name))
if pkg_name in INCOMPATIBLE_PACKAGES:
log_warn(
"{} is marked as not fully compatible\n"
"Sphinx will throw errors when it tries to import this package and code documentation will be incomplete"
.format(pkg_name))
if os.path.isdir(os.path.join(rospack.get_path(pkg_name), "src")):
log_info("Indexing source code")
p = subprocess.run([
"sphinx-apidoc", "-f",
"-o", os.path.join("doc", "_generated"),
"--ext-autodoc",
"--ext-doctest",
"--ext-intersphinx",
"--ext-todo",
"--ext-coverage",
"--ext-mathjax",
"--ext-viewcode",
"src"
], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.DEVNULL,
cwd=rospack.get_path(pkg_name),
encoding="ASCII")
handle_process_output(args, p)
log_info("Building html to {}"
.format(os.path.join(os.path.basename(rospack.get_path(pkg_name)), "doc", "_build", "html", "index.html")))
p = subprocess.run([
"rosdoc_lite", "./",
"-o", os.path.join("doc", "_build")
], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.DEVNULL,
cwd=rospack.get_path(pkg_name),
encoding='ASCII')
handle_process_output(args, p)
def build_meta_doc(args):
log_info("Building bitbots_meta documentation")
doc_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "doc")
p = subprocess.run(["sphinx-build", doc_dir, os.path.join(doc_dir, "_build")],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='ASCII')
handle_process_output(args, p)
def build_internal_doc(args):
doc_dir = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "doc_internal")
log_info("Building internal documentation to {}".format(os.path.join(doc_dir, '_build', 'index.html')))
if not os.path.isdir(doc_dir):
log_error('Path {} not found or not a directory\nInternal documentation should be located here'.format(doc_dir))
return
p = subprocess.run(["sphinx-build", doc_dir, os.path.join(doc_dir, "_build")],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="ASCII")
handle_process_output(args, p)
if __name__ == '__main__':
args = parse_args()
build_all = not args.meta and not args.package and not args.internal
log_info("Indexing packages")
ros_package_path = os.getenv("ROS_PACKAGE_PATH", "")
if ":" in ros_package_path:
ros_package_path = ros_package_path.split(":")
else:
ros_package_path = [ros_package_path]
rospack = RosPack(ros_package_path)
if build_all:
for pkg_name in filter_packages_for_bitbots(rospack, args):
print()
build_package_doc(rospack, pkg_name, args)
if args.package:
if args.package not in filter_packages_for_bitbots(rospack, args):
log_error("The package {} is not a bitbots package or excluded".format(args.package))
exit(1)
print()
build_package_doc(rospack, args.package, args)
if build_all or args.meta:
print()
build_meta_doc(args)
if build_all or args.internal:
print()
build_internal_doc(args)
| 35.293269
| 120
| 0.620896
|
c15c9b47036632b3f4d6691a7cf6428f704e15a5
| 12,967
|
py
|
Python
|
tests/models/test_security.py
|
evimacs/omicron
|
abe77fd25a93cf3d0d17661ae957373474724535
|
[
"MIT"
] | 4
|
2020-11-09T02:23:51.000Z
|
2021-01-24T00:45:21.000Z
|
tests/models/test_security.py
|
evimacs/omicron
|
abe77fd25a93cf3d0d17661ae957373474724535
|
[
"MIT"
] | 14
|
2020-11-09T02:31:34.000Z
|
2021-12-22T10:15:47.000Z
|
tests/models/test_security.py
|
evimacs/omicron
|
abe77fd25a93cf3d0d17661ae957373474724535
|
[
"MIT"
] | 2
|
2021-01-24T00:45:25.000Z
|
2021-12-24T06:18:37.000Z
|
import datetime
import logging
import unittest
from unittest import mock
import arrow
import numpy as np
import omicron
from omicron import cache
from omicron.core.timeframe import tf
from omicron.core.types import FrameType, SecurityType
from omicron.models.securities import Securities
from omicron.models.security import Security
from tests import init_test_env, load_data, start_omega
logger = logging.getLogger(__name__)
cfg = init_test_env()
class SecurityTest(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self) -> None:
self.omega = await start_omega()
await omicron.init()
self.securities = Securities()
await self.securities.load()
async def asyncTearDown(self) -> None:
await omicron.shutdown()
if self.omega is not None:
self.omega.kill()
def assert_bars_equal(self, expected, actual):
self.assertEqual(expected[0][0], actual[0][0])
self.assertEqual(expected[-1][0], actual[-1][0])
np.testing.assert_array_almost_equal(expected[0][1:5], list(actual[0])[1:5], 2)
np.testing.assert_array_almost_equal(
expected[-1][1:5], list(actual[-1])[1:5], 2
)
self.assertAlmostEqual(expected[0][6] / 10000, list(actual[0])[6] / 10000, 0)
self.assertAlmostEqual(expected[-1][6] / 10000, list(actual[-1])[6] / 10000, 0)
def test_000_properties(self):
sec = Security("000001.XSHE")
self.assertEqual("平安银行[000001.XSHE]", str(sec))
for key, value in zip(
"name display_name ipo_date end_date".split(" "),
"PAYH 平安银行 1991-04-03 2200-01-01".split(" "),
):
self.assertEqual(str(getattr(sec, key)), value)
sec = Security("399001.XSHE")
self.assertEqual("399001", sec.sim_code)
self.assertEqual(SecurityType.INDEX, sec.type)
self.assertEqual("399001", Security.simplify_code("399001.XSHE"))
with mock.patch("arrow.now", return_value=arrow.get("2020-12-31")):
self.assertEqual(3889, sec.days_since_ipo())
try:
sec.bars
self.assertTrue(False, "should not go here")
except ValueError:
pass
def test_001_parse_security_type(self):
codes = [
"600001.XSHG", # 浦发银行
"000001.XSHG", # 上证指数
"880001.XSHG", # 总市值
"999999.XSHG", # 上证指数
"511010.XSHG", # 国债ETF
"100303.XSHG", # 国债0303
"110031.XSHG", # 航信转债
"120201.XSHG", # 02三峡债
"000001.XSHE", # 平安银行
"300001.XSHE", # 特锐德
"399001.XSHE", # 深成指
"150001.XSHE", # 福锐进取
"131800.XSHE", # 深圳债券
"200011.XSHE", # B股
]
expected = [
SecurityType.STOCK,
SecurityType.INDEX,
SecurityType.INDEX,
SecurityType.INDEX,
SecurityType.ETF,
SecurityType.BOND,
SecurityType.BOND,
SecurityType.BOND,
SecurityType.STOCK,
SecurityType.STOCK,
SecurityType.INDEX,
SecurityType.ETF,
SecurityType.BOND,
SecurityType.STOCK_B,
]
for i, code in enumerate(codes):
self.assertEqual(Security.parse_security_type(code), expected[i])
async def test_002_load_bars(self):
sec = Security("000001.XSHE")
start = arrow.get("2020-01-03").date()
stop = arrow.get("2020-1-16").date()
frame_type = FrameType.DAY
expected = [
[
arrow.get("2020-01-03").date(),
16.94,
17.31,
16.92,
17.18,
1.11619481e8,
1914495474.63,
118.73,
],
[stop, 16.52, 16.57, 16.2, 16.33, 1.02810467e8, 1678888507.83, 118.73],
]
logger.info("scenario: no cache")
await cache.clear_bars_range(sec.code, frame_type)
bars = await sec.load_bars(start, start, frame_type)
self.assert_bars_equal([expected[0]], bars)
bars = await sec.load_bars(start, stop, frame_type)
self.assert_bars_equal(expected, bars)
logger.info("scenario: load from cache")
bars = await sec.load_bars(start, stop, frame_type)
self.assert_bars_equal(expected, bars)
logger.info("scenario: partial data fetch: head")
await cache.set_bars_range(
sec.code, frame_type, start=arrow.get("2020-01-07").date()
)
bars = await sec.load_bars(start, stop, frame_type)
self.assert_bars_equal(expected, bars)
logger.info("scenario: partial data fetch: tail")
await cache.set_bars_range(
sec.code, frame_type, end=arrow.get("2020-01-14").date()
)
bars = await sec.load_bars(start, stop, frame_type)
self.assert_bars_equal(expected, bars)
logger.info("scenario: 1min level backward")
frame_type = FrameType.MIN1
start = arrow.get("2020-05-06 15:00:00", tzinfo=cfg.tz).datetime
await cache.clear_bars_range(sec.code, frame_type)
stop = tf.shift(start, -249, frame_type)
start, stop = stop, start
bars = await sec.load_bars(start, stop, frame_type)
# fmt:off
expected = [
[
arrow.get('2020-04-30 14:51:00', tzinfo=cfg.tz).datetime, 13.99, 14.,
13.98, 13.99, 281000., 3931001., 118.725646
],
[
arrow.get('2020-05-06 15:00:00', tzinfo=cfg.tz).datetime, 13.77,
13.77, 13.77, 13.77, 1383400.0, 19049211.45000005, 118.725646
]
]
# fmt:on
self.assert_bars_equal(expected, bars)
logger.info("scenario: 30 min level")
frame_type = FrameType.MIN15
start = arrow.get("2020-05-06 10:15:00", tzinfo=cfg.tz).datetime
await cache.clear_bars_range(sec.code, frame_type)
stop = arrow.get("2020-05-06 15:00:00", tzinfo=cfg.tz).datetime
bars = await sec.load_bars(start, stop, frame_type)
# fmt: off
expected = [
[
arrow.get('2020-05-06 10:15:00', tzinfo=cfg.tz).datetime, 13.67,
13.74, 13.66, 13.72, 8341905., 1.14258451e+08, 118.725646
],
[
arrow.get('2020-05-06 15:00:00', tzinfo=cfg.tz).datetime, 13.72,
13.77, 13.72, 13.77, 7053085., 97026350.76999998, 118.725646
]
]
# fmt: on
self.assert_bars_equal(expected, bars)
async def test_005_realtime_bars(self):
"""测试获取实时行情"""
sec = Security("000001.XSHE")
frame_type = FrameType.MIN15
logger.info("scenario: get realtime bars")
start = arrow.get("2020-05-06 10:15:00", tzinfo=cfg.tz).datetime
stop = arrow.get("2020-05-06 10:25:00", tzinfo=cfg.tz).datetime
await cache.clear_bars_range(sec.code, frame_type)
bars = await sec.load_bars(start, stop, frame_type)
self.assertEqual(start, bars[0]["frame"])
self.assertEqual(stop, bars[-1]["frame"])
# now we've cached bars at 2020-05-06 10:15:00
bars = await sec.load_bars(start, stop, frame_type)
self.assertEqual(start, bars[0]["frame"])
self.assertEqual(stop, bars[-1]["frame"])
async def test_003_slice(self):
sec = Security("000001.XSHE")
start = arrow.get("2020-01-03").date()
stop = arrow.get("2020-01-16").date()
await sec.load_bars(start, stop, FrameType.DAY)
bars = sec[0:]
expected = [
[start, 16.94, 17.31, 16.92, 17.18, 1.11619481e8, 1914495474.63, 118.73],
[stop, 16.52, 16.57, 16.2, 16.33, 1.02810467e8, 1678888507.83, 118.73],
]
self.assert_bars_equal(expected, bars)
expected = [
[
arrow.get("2020-01-03").date(),
16.94,
17.31,
16.92,
17.18,
1.11619481e8,
1914495474.63,
118.73,
],
[
arrow.get("2020-01-06").date(),
17.01,
17.34,
16.91,
17.07,
86208350.0,
1477930193.19,
118.73,
],
]
self.assert_bars_equal(expected, sec[0:2])
async def test_004_fq(self):
"""测试复权"""
sec = Security("002320.XSHE")
start = arrow.get("2020-05-06").date()
stop = tf.shift(start, -249, FrameType.DAY)
start, stop = stop, start
# bars with no fq
bars1 = await sec.load_bars(start, stop, FrameType.DAY, fq=False)
bars2 = await sec.load_bars(start, stop, FrameType.DAY)
self.assertEqual(250, len(bars1))
expected1 = [
[
arrow.get("2019-04-24").date(),
16.26,
16.38,
15.76,
16.00,
5981087.0,
9.598480e07,
3.846000,
],
[
arrow.get("2020-05-06").date(),
10.94,
11.22,
10.90,
11.15,
22517883.0,
2.488511e08,
8.849346,
],
]
expected2 = [
[
arrow.get("2019-04-24").date(),
7.07,
7.12,
6.85,
6.95,
13762015.0,
9.598480e07,
3.846000,
],
[
arrow.get("2020-05-06").date(),
10.94,
11.22,
10.90,
11.15,
22517883.0,
2.488511e08,
8.849346,
],
]
self.assert_bars_equal(expected2, bars2)
self.assert_bars_equal(expected1, bars1)
async def test_price_change(self):
sec = Security("000001.XSHG")
frame_type = FrameType.DAY
start = arrow.get("2020-07-29").date()
end = arrow.get("2020-8-7").date()
pc = await sec.price_change(start, end, frame_type, False)
self.assertAlmostEqual(pc, 3354.04 / 3294.55 - 1, places=3)
async def test_load_bars_batch(self):
codes = ["000001.XSHE", "000001.XSHG"]
# end = arrow.now(tz=cfg.tz).datetime
# async for code, bars in Security.load_bars_batch(codes, end, 10,
# FrameType.MIN30):
# print(bars[-2:])
# self.assertEqual(10, len(bars))
#
# codes = ['000001.XSHE', '000001.XSHG']
end = arrow.get("2020-08-27").datetime
async for code, bars in Security.load_bars_batch(codes, end, 5, FrameType.DAY):
print(code, bars[-2:])
self.assertEqual(5, len(bars))
self.assertEqual(bars[-1]["frame"], end.date())
if code == "000001.XSHG":
self.assertAlmostEqual(3350.11, bars[-1]["close"], places=2)
async def test_get_bars_with_turnover(self):
code = "000001.XSHE"
start = arrow.get("2020-01-03").date()
stop = arrow.get("2020-1-16").date()
frame_type = FrameType.DAY
expected = [
0.5752,
0.4442,
0.3755,
0.4369,
0.5316,
0.3017,
0.4494,
0.6722,
0.4429,
0.5298,
]
await cache.security.delete(f"{code}:{frame_type.value}")
sec = Security(code)
bars = await sec.load_bars(start, stop, frame_type, turnover=True)
for i, bar in enumerate(bars):
self.assertAlmostEqual(expected[i], bar["turnover"], places=3)
start = arrow.get("2020-11-02 15:00:00", tzinfo=cfg.tz).datetime
stop = arrow.get("2020-11-06 14:30:00", tzinfo=cfg.tz).datetime
frame_type = FrameType.MIN30
await cache.security.delete(f"{code}:{frame_type.value}")
sec = Security(code)
bars = await sec.load_bars(start, stop, frame_type, turnover=True)
expected = [0.02299885, 0.02921041]
self.assertAlmostEqual(expected[0], bars["turnover"][-2], places=3)
self.assertAlmostEqual(expected[1], bars["turnover"][-1], places=3)
def test_load_bars_from_dataframe(self):
code = "000001.XSHG"
sec = Security("000001.XSHG")
df = load_data(code, "1d")
sec.load_bars_from_dataframe(df)
self.assertEqual(2188, len(sec))
self.assertEqual(datetime.date(2012, 1, 4), sec.frame[0])
self.assertEqual(datetime.date(2020, 12, 31), sec.frame[-1])
if __name__ == "__main__":
unittest.main()
| 33.248718
| 87
| 0.534896
|
33d0f0db12e46418fc2e9dffd1d0868e30758acf
| 1,991
|
py
|
Python
|
tests/unit/test_copr_helper.py
|
stefwalter/packit
|
d675018518ef200a06ea7636dd203100d872a772
|
[
"MIT"
] | 81
|
2019-02-07T15:38:34.000Z
|
2020-07-16T06:33:02.000Z
|
tests/unit/test_copr_helper.py
|
stefwalter/packit
|
d675018518ef200a06ea7636dd203100d872a772
|
[
"MIT"
] | 825
|
2019-02-07T15:08:16.000Z
|
2020-08-02T08:11:23.000Z
|
tests/unit/test_copr_helper.py
|
stefwalter/packit
|
d675018518ef200a06ea7636dd203100d872a772
|
[
"MIT"
] | 51
|
2019-02-08T09:56:29.000Z
|
2020-06-17T15:34:00.000Z
|
import pytest
from flexmock import flexmock
import packit
from packit.copr_helper import CoprHelper
class TestCoprHelper:
@pytest.mark.parametrize(
# copr_client.mock_chroot_proxy.get_list() returns dictionary
"get_list_keys, expected_return",
[
pytest.param(["chroot1", "_chroot2"], ["chroot1"], id="chroot_list"),
pytest.param([], [], id="empty_list"),
],
)
    def test_get_available_chroots(self, get_list_keys, expected_return):
copr_client_mock = flexmock(mock_chroot_proxy=flexmock())
copr_client_mock.mock_chroot_proxy.should_receive("get_list.keys").and_return(
get_list_keys
)
flexmock(packit.copr_helper.CoprClient).should_receive(
"create_from_config_file"
).and_return(copr_client_mock)
copr_helper = CoprHelper("_upstream_local_project")
copr_helper.get_available_chroots.cache_clear()
assert copr_helper.get_available_chroots() == expected_return
@pytest.mark.parametrize(
"owner,project,section,expected_suffix",
[
(
"@rhinstaller",
"Anaconda",
"permissions",
"g/rhinstaller/Anaconda/permissions/",
),
("@rhinstaller", "Anaconda", None, "g/rhinstaller/Anaconda/edit/"),
("someone", "Anaconda", "permissions", "someone/Anaconda/permissions/"),
],
)
def test_settings_url(self, owner, project, section, expected_suffix):
copr_client_mock = flexmock(config={"copr_url": "https://fedoracloud.org"})
flexmock(packit.copr_helper.CoprClient).should_receive(
"create_from_config_file"
).and_return(copr_client_mock)
copr_helper = CoprHelper("_upstream_local_project")
assert (
copr_helper.get_copr_settings_url(owner, project, section)
== f"https://fedoracloud.org/coprs/{expected_suffix}"
)
| 35.553571
| 86
| 0.640382
|
2bd0267a7f141fa14c94384f48e0a4ca49792458
| 921
|
py
|
Python
|
pycalc/reference/main_window.py
|
lbreede/pyqt-cheatsheet
|
ca094108ed60f2f0c3ae3ba5c7b4b5e7a5c73861
|
[
"MIT"
] | null | null | null |
pycalc/reference/main_window.py
|
lbreede/pyqt-cheatsheet
|
ca094108ed60f2f0c3ae3ba5c7b4b5e7a5c73861
|
[
"MIT"
] | null | null | null |
pycalc/reference/main_window.py
|
lbreede/pyqt-cheatsheet
|
ca094108ed60f2f0c3ae3ba5c7b4b5e7a5c73861
|
[
"MIT"
] | null | null | null |
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtWidgets import QLabel
from PyQt5.QtWidgets import QMainWindow
from PyQt5.QtWidgets import QStatusBar
from PyQt5.QtWidgets import QToolBar
class Window(QMainWindow):
def __init__(self, parent=None):
super().__init__(parent)
self.setWindowTitle('QMainWindow')
self.setCentralWidget(QLabel("I'm the Central Widget"))
self._createMenu()
self._createToolBar()
self._createStatusBar()
def _createMenu(self):
self.menu = self.menuBar().addMenu("&Menu")
self.menu.addAction('&Exit', self.close)
def _createToolBar(self):
tools = QToolBar()
self.addToolBar(tools)
tools.addAction('Exit', self.close)
def _createStatusBar(self):
status = QStatusBar()
status.showMessage("I'm the Status Bar")
self.setStatusBar(status)
if __name__ == '__main__':
app = QApplication(sys.argv)
win = Window()
win.show()
sys.exit(app.exec_())
| 24.891892
| 57
| 0.7481
|
8422d8c0dbcb5f94d9a01e56b11562201eb4188d
| 8,196
|
py
|
Python
|
pypistats/cli.py
|
cclauss/pypistats
|
0230093b7b55b1ad5026256f6119a3fbedbe810e
|
[
"MIT"
] | null | null | null |
pypistats/cli.py
|
cclauss/pypistats
|
0230093b7b55b1ad5026256f6119a3fbedbe810e
|
[
"MIT"
] | null | null | null |
pypistats/cli.py
|
cclauss/pypistats
|
0230093b7b55b1ad5026256f6119a3fbedbe810e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# encoding: utf-8
"""
CLI with subcommands for pypistats
"""
import argparse
from datetime import date, datetime
from dateutil.relativedelta import relativedelta
import pypistats
cli = argparse.ArgumentParser()
subparsers = cli.add_subparsers(dest="subcommand")
def argument(*name_or_flags, **kwargs):
"""Convenience function to properly format arguments to pass to the
subcommand decorator.
"""
return list(name_or_flags), kwargs
def subcommand(args=None, parent=subparsers):
"""Decorator to define a new subcommand in a sanity-preserving way.
The function will be stored in the ``func`` variable when the parser
parses arguments so that it can be called directly like so::
args = cli.parse_args()
args.func(args)
Usage example::
@subcommand([argument("-d", help="Enable debug mode", action="store_true")])
def subcommand(args):
print(args)
Then on the command line::
$ python cli.py subcommand -d
https://mike.depalatis.net/blog/simplifying-argparse.html
"""
if args is None:
args = []
def decorator(func):
func2 = getattr(pypistats, func.__name__)
parser = parent.add_parser(
func.__name__,
description=func2.__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
for arg in args:
parser.add_argument(*arg[0], **arg[1])
parser.set_defaults(func=func)
return decorator
def _valid_date(date_string, date_format):
try:
datetime.strptime(date_string, date_format)
return date_string
except ValueError:
msg = f"Not a valid date: '{date_string}'."
raise argparse.ArgumentTypeError(msg)
def _valid_yyyy_mm_dd(date_string):
return _valid_date(date_string, "%Y-%m-%d")
def _valid_yyyy_mm(date_string):
return _valid_date(date_string, "%Y-%m")
def _valid_yyyy_mm_optional_dd(date_string):
try:
return _valid_yyyy_mm_dd(date_string)
except argparse.ArgumentTypeError:
return _valid_yyyy_mm(date_string)
def _define_format(args) -> str:
if args.json:
return "json"
return args.format
FORMATS = ("json", "markdown", "rst", "html")
arg_start_date = argument(
"-sd",
"--start-date",
metavar="yyyy-mm[-dd]",
type=_valid_yyyy_mm_optional_dd,
help="Start date",
)
arg_end_date = argument(
"-ed",
"--end-date",
metavar="yyyy-mm[-dd]",
type=_valid_yyyy_mm_optional_dd,
help="End date",
)
arg_month = argument(
"-m",
"--month",
metavar="yyyy-mm",
type=_valid_yyyy_mm,
help="Shortcut for -sd & -ed for a single month",
)
arg_last_month = argument(
"-l",
"--last-month",
help="Shortcut for -sd & -ed for last month",
action="store_true",
)
arg_json = argument("-j", "--json", action="store_true", help='Shortcut for "-f json"')
arg_daily = argument("-d", "--daily", action="store_true", help="Show daily downloads")
arg_monthly = argument("--monthly", action="store_true", help="Show monthly downloads")
arg_format = argument(
"-f", "--format", default="markdown", choices=FORMATS, help="The format of output"
)
arg_verbose = argument(
"-v", "--verbose", action="store_true", help="Print debug messages to stderr"
)
@subcommand(
[
argument("package"),
argument("-p", "--period", choices=("day", "week", "month")),
arg_format,
arg_json,
arg_verbose,
]
)
def recent(args): # pragma: no cover
print(
pypistats.recent(
args.package, period=args.period, format=args.format, verbose=args.verbose
)
)
@subcommand(
[
argument("package"),
argument("--mirrors", choices=("true", "false", "with", "without")),
arg_format,
arg_json,
arg_start_date,
arg_end_date,
arg_month,
arg_last_month,
arg_daily,
arg_monthly,
arg_verbose,
]
)
def overall(args): # pragma: no cover
if args.mirrors in ["with", "without"]:
args.mirrors = args.mirrors == "with"
print(
pypistats.overall(
args.package,
mirrors=args.mirrors,
start_date=args.start_date,
end_date=args.end_date,
format=args.format,
total="daily" if args.daily else ("monthly" if args.monthly else "all"),
verbose=args.verbose,
)
)
@subcommand(
[
argument("package"),
argument("-V", "--version", help="eg. 2 or 3"),
arg_format,
arg_json,
arg_start_date,
arg_end_date,
arg_month,
arg_last_month,
arg_daily,
arg_monthly,
arg_verbose,
]
)
def python_major(args): # pragma: no cover
print(
pypistats.python_major(
args.package,
version=args.version,
start_date=args.start_date,
end_date=args.end_date,
format=args.format,
total="daily" if args.daily else ("monthly" if args.monthly else "all"),
verbose=args.verbose,
)
)
@subcommand(
[
argument("package"),
argument("-V", "--version", help="eg. 2.7 or 3.6"),
arg_format,
arg_json,
arg_start_date,
arg_end_date,
arg_month,
arg_last_month,
arg_daily,
arg_monthly,
arg_verbose,
]
)
def python_minor(args): # pragma: no cover
print(
pypistats.python_minor(
args.package,
version=args.version,
start_date=args.start_date,
end_date=args.end_date,
format=args.format,
total="daily" if args.daily else ("monthly" if args.monthly else "all"),
verbose=args.verbose,
)
)
@subcommand(
[
argument("package"),
argument("-o", "--os", help="eg. windows, linux, darwin or other"),
arg_format,
arg_json,
arg_start_date,
arg_end_date,
arg_month,
arg_last_month,
arg_daily,
arg_monthly,
arg_verbose,
]
)
def system(args): # pragma: no cover
print(
pypistats.system(
args.package,
os=args.os,
start_date=args.start_date,
end_date=args.end_date,
format=args.format,
total="daily" if args.daily else ("monthly" if args.monthly else "all"),
verbose=args.verbose,
)
)
def _month(yyyy_mm):
"""Helper to return start_date and end_date of a month as yyyy-mm-dd"""
year, month = map(int, yyyy_mm.split("-"))
first = date(year, month, 1)
last = first + relativedelta(months=1) - relativedelta(days=1)
return str(first), str(last)
def _last_month():
"""Helper to return start_date and end_date of the previous month as yyyy-mm-dd"""
today = date.today()
d = today - relativedelta(months=1)
return _month(d.isoformat()[:7])
def main():
cli.add_argument(
"-V", "--version", action="version", version=f"%(prog)s {pypistats.__version__}"
)
args = cli.parse_args()
if args.subcommand is None:
cli.print_help()
else:
# Convert yyyy-mm to yyyy-mm-dd
if hasattr(args, "start_date") and args.start_date:
try:
# yyyy-mm
args.start_date, _ = _month(args.start_date)
except ValueError:
# yyyy-mm-dd
pass
# Convert yyyy-mm to yyyy-mm-dd
if hasattr(args, "end_date") and args.end_date:
try:
# yyyy-mm
_, args.end_date = _month(args.end_date)
except ValueError:
# yyyy-mm-dd
pass
if hasattr(args, "month") and args.month:
args.start_date, args.end_date = _month(args.month)
if hasattr(args, "last_month") and args.last_month:
args.start_date, args.end_date = _last_month()
args.format = _define_format(args)
args.func(args)
if __name__ == "__main__":
main()
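# Worked sketch of the month-boundary arithmetic used by _month()/_last_month() above
# (requires python-dateutil; the sample month is illustrative).
from datetime import date
from dateutil.relativedelta import relativedelta
first = date(2018, 2, 1)
last = first + relativedelta(months=1) - relativedelta(days=1)
assert (str(first), str(last)) == ("2018-02-01", "2018-02-28")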
| 25.374613 | 88 | 0.591264 |
ed635d50d7511b2bd536ccb86ad98e6b4c33df41 | 733 | py | Python | tingle/util.py | deathbeds/tingle | 237fad78fbf2f727812e40d7dc4b4bdca44ae451 | ["MIT"] | null | null | null | tingle/util.py | deathbeds/tingle | 237fad78fbf2f727812e40d7dc4b4bdca44ae451 | ["MIT"] | null | null | null | tingle/util.py | deathbeds/tingle | 237fad78fbf2f727812e40d7dc4b4bdca44ae451 | ["MIT"] | null | null | null |
import contextlib
def append_ast_transformers(shell, transformer):
if any(
x for x in shell.ast_transformers
if isinstance(x, transformer)
):
return
shell.ast_transformers.append(transformer())
def remove_ast_transformers(shell, transformer):
shell.ast_transformers = [
x for x in shell.ast_transformers
if not isinstance(x, transformer)
]
@contextlib.contextmanager
def argv(list):
import sys
prior, sys.argv = sys.argv, list
yield
sys.argv = prior
def ipy_transform(code):
try:
import IPython
code = IPython.core.inputtransformer2.TransformerManager().transform_cell(code)
except BaseException:
...
return code
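# Usage sketch for the argv() context manager defined above: sys.argv is swapped for the
# duration of the block and restored afterwards (the argument values are illustrative).
import sys
with argv(["prog", "--flag", "value"]):
    assert sys.argv == ["prog", "--flag", "value"]
# after the block, the previous sys.argv is back in place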
| 20.942857 | 87 | 0.673943 |
6a6572cdd3cb2e00c140ca5b4db1980985dd8b73 | 1,044 | py | Python | scrappyepfo/app.py | KB-perByte/KudosAssignment | aff0f9e91952220ddb926de7a366f5024b439beb | ["Unlicense"] | null | null | null | scrappyepfo/app.py | KB-perByte/KudosAssignment | aff0f9e91952220ddb926de7a366f5024b439beb | ["Unlicense"] | null | null | null | scrappyepfo/app.py | KB-perByte/KudosAssignment | aff0f9e91952220ddb926de7a366f5024b439beb | ["Unlicense"] | 1 | 2021-04-11T07:59:01.000Z | 2021-04-11T07:59:01.000Z |
try:
from scrappyepfo.data_scraper import get_comp_list, get_comp_list_mca
except ImportError:
from data_scraper import get_comp_list, get_comp_list_mca
from fuzzywuzzy import process
import pprint
from fastapi import FastAPI
app = FastAPI()
@app.get("/epfo/{companyName}")
async def readEpfo(companyName):
'''api call epfo'''
return perform_epfo(companyName)
@app.get("/mca/{companyName}")
async def readMca(companyName):
'''api call mca'''
return perform_mca(companyName)
@app.get("/")
async def root():
'''api call root'''
return {"Status":"OK" }
@app.get("/devInfo/")
async def devInfo():
'''api call dev'''
data = {
"Name" : "Sagar Paul",
"Email" : "paul.sagar@yahoo.com",
"Github" : "https://github.com/KB-perByte",
}
return data
def perform_epfo(name):
comp_list = get_comp_list(name)
pprint.pprint(comp_list[0])
return comp_list[0]
def perform_mca(name):
comp_list = get_comp_list_mca(name)
pprint.pprint(comp_list)
return comp_list
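# Minimal check sketch for the routes above using FastAPI's test client (assumes the
# `app` instance defined above is importable; requires the FastAPI test dependencies).
from fastapi.testclient import TestClient
client = TestClient(app)
assert client.get("/").json() == {"Status": "OK"}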
| 20.470588 | 73 | 0.666667 |
3c6454d5c56094144789df077c5f2f6684e21b4e | 18,536 | py | Python | sentinel.py | karttur/geoimagine02-postgresdb | 9f1682e42b6a3d68b41b80a80b742d00aea80f1d | ["BSD-3-Clause"] | null | null | null | sentinel.py | karttur/geoimagine02-postgresdb | 9f1682e42b6a3d68b41b80a80b742d00aea80f1d | ["BSD-3-Clause"] | null | null | null | sentinel.py | karttur/geoimagine02-postgresdb | 9f1682e42b6a3d68b41b80a80b742d00aea80f1d | ["BSD-3-Clause"] | null | null | null |
'''
Created on 8 juni 2018
@author: thomasgumbricht
'''
# Package application imports
from geoimagine.postgresdb import PGsession
from geoimagine.postgresdb.compositions import InsertCompDef, InsertCompProd, InsertLayer, SelectComp
from geoimagine.support import Today
class ManageSentinel(PGsession):
'''
DB support for setting up processes
'''
def __init__(self):
""" The constructor connects to the database"""
HOST = 'karttur'
query = self._GetCredentials( HOST )
#Connect to the Postgres Server
self.session = PGsession.__init__(self,query,'ManageSentinel')
def _InsertSentinelMODISTile(self, query):
self.cursor.execute("SELECT * FROM sentinel.regions WHERE mgrs = '%(mgrs)s' AND regiontype = '%(regiontype)s' AND regionid = '%(regionid)s';" %query)
record = self.cursor.fetchone()
if record == None and not query['delete']:
##print aD['senssat'],aD['typeid'],aD['subtype'], filecat, tD['pattern'], tD['folder'], tD['band'], tD['prefix'],suffix, tD['celltype'], tD['fileext']
self.cursor.execute("INSERT INTO sentinel.regions (regionid, regiontype, mgrs, mgrsid, utm) VALUES (%s, %s, %s, %s, %s)",
(query['regionid'], query['regiontype'],query['mgrs'], query['mgrsid'], query['utmzone']))
self.conn.commit()
elif record and query['delete']:
self.cursor.execute("DELETE FROM modis.regions WHERE mgrs = '%(mgrss' AND regiontype = '%(regiontype)s'AND regionid = '%(regionid)s';" %query)
self.conn.commit()
def _InsertSentinelRegionTile(self, query):
self.cursor.execute("SELECT * FROM %(system)s.regions WHERE regionid = '%(regionid)s';" %query)
record = self.cursor.fetchone()
if record == None:
#print "SELECT * FROM regions WHERE regions.regionid = '%(regid)s' AND regioncat = '%(cat)s' AND type = '%(typ)s';" %query
warnstr = 'WARNING can not add tile to region %(regionid)s, no such region at category %(category)s and type %(type)s' %query
print (warnstr)
return
self.cursor.execute("SELECT * FROM sentinel.regions WHERE mgrs = '%(mgrs)s' AND regiontype = '%(regiontype)s' AND regionid = '%(regionid)s';" %query)
record = self.cursor.fetchone()
if record == None and not query['delete']:
##print aD['senssat'],aD['typeid'],aD['subtype'], filecat, tD['pattern'], tD['folder'], tD['band'], tD['prefix'],suffix, tD['celltype'], tD['fileext']
self.cursor.execute("INSERT INTO sentinel.regions (regionid, regiontype, mgrs, mgrsid, utm) VALUES (%s, %s, %s, %s, %s)",
(query['regionid'], query['regiontype'],query['mgrs'], query['mgrsid'], query['utmzone']))
self.conn.commit()
elif record and query['delete']:
self.cursor.execute("DELETE FROM modis.regions WHERE mgrs = '%(mgrss' AND regiontype = '%(regiontype)s'AND regionid = '%(regionid)s';" %query)
self.conn.commit()
def _SelectSentinelRegionTiles(self,query):
#print ("SELECT path, row from regions.sentinel WHERE regionid = '%(regionid)s'" %query)
self.cursor.execute("SELECT path, row from regions.sentinel WHERE regionid = '%(regionid)s'" %query)
records = self.cursor.fetchall()
return records
def _GetMetaTranslator(self):
#print (self.name)
self.cursor.execute("SELECT * FROM sentinel.metatranslate")
records = self.cursor.fetchall()
recD = {}
for row in records:
recD[row[0]] = {'dst':row[1],'tab':row[2], 'typ':row[3]}
return recD
def _SelectComp(self,system,comp):
comp['system'] = system
return SelectComp(self, comp)
def _SelectLayer(self,system,queryD,paramL):
return self._SingleSearch(queryD,paramL,system,'layers',True)
def _SelectLayerOnLocus(self,system,queryD,paramL):
rec = self._SingleSearch(queryD, paramL, system, 'layers')
return dict(zip(paramL,rec))
def _InstertTileMeta(self,queryD):
rec = self._CheckInsertSingleRecord(queryD,'sentinel', 'tilemeta', [('tileid',)])
def _InsertGranuleMeta(self,queryD):
rec = self._CheckInsertSingleRecord(queryD,'sentinel', 'granulemeta', [('granuleid',)])
def _InstertTile(self,queryD):
rec = self._CheckInsertSingleRecord(queryD,'sentinel', 'tiles', [('tileid',)])
if rec != None:
if rec[2] != queryD['mgrs']:
print (rec)
print (queryD)
print (queryD['mgrs'],rec[2])
raise ValueError('mgrs mismatch between stored tile record and query: %s vs %s' % (rec[2], queryD['mgrs']))
def _InstertGranule(self,queryD):
rec = self._CheckInsertSingleRecord(queryD,'sentinel', 'granules', [('granuleid',)])
def _InsertVectorSearch(self,queryD):
self._CheckInsertSingleRecord(queryD,'sentinel', 'vectorsearches')
def _SelectVectorSearch(self,queryD,paramL):
rec = self._SingleSearch(queryD,paramL,'sentinel', 'vectorsearches')
return rec
def _UpdateTileStatus(self, queryD):
query = "UPDATE sentinel.tiles SET %(column)s = '%(status)s' WHERE tileid = '%(tileid)s'" %queryD
self.cursor.execute(query)
self.conn.commit()
def _UpdateGranuleStatus(self, queryD):
query = "UPDATE sentinel.granules SET %(column)s = '%(status)s' WHERE granuleid = '%(granuleid)s'" %queryD
self.cursor.execute(query)
self.conn.commit()
def _SelectSentinelGranules(self,params, period, statusD):
queryD = {}
queryD['platformname'] = {'val':params.platformname, 'op':'=' }
queryD['product'] = {'val':params.prodtype, 'op':'=' }
if 'cloudcover' in statusD:
queryD['cloudcover'] = {'val':params.cloudmax, 'op':'<=' }
for status in statusD:
queryD[status] = {'val':statusD[status], 'op':'=' }
if period:
datumkey = period.datumL[0]
startdate = period.datumD[datumkey]['startdate']
queryD['acqdate'] = {'val':startdate, 'op':'>=' }
enddate = period.datumD[datumkey]['enddate']
queryD['#acqdate'] = {'val':enddate, 'op':'<=' }
if period.datumD[datumkey]['enddoy'] > 0:
startdoy = period.datumD[datumkey]['startdoy']
queryD['doy'] = {'val':startdoy, 'op':'>=' }
enddoy = period.datumD[datumkey]['enddoy']
queryD['#doy'] = {'val':enddoy, 'op':'<=' }
#if params.orbitdirection.upper() != 'B':
# pass
wherestr = self._DictToSelect(queryD)
query = "SELECT uuid, granuleid, source, product, folder, acqdate, orbitid FROM sentinel.granulemeta \
JOIN sentinel.granules USING (granuleid, product) \
%s;" %(wherestr)
print (query)
self.cursor.execute(query)
return self.cursor.fetchall()
def _SelectSentinelTiles(self,params, period, statusD):
queryD = {}
queryD['m.platformname'] = {'val':params.platformname, 'op':'=' }
queryD['t.product'] = {'val':params.prodtype, 'op':'=' }
for status in statusD:
queryD[status] = {'val':statusD[status], 'op':'=' }
if 'cloudcover' in statusD:
#overwrites and cloudcover from above
queryD['t.cloudcover'] = {'val':params.cloudmax, 'op':'<=' }
datumkey = period.datumL[0]
startdate = period.datumD[datumkey]['startdate']
queryD['t.acqdate'] = {'val':startdate, 'op':'>=' }
enddate = period.datumD[datumkey]['enddate']
queryD['#t.acqdate'] = {'val':enddate, 'op':'<=' }
if period.datumD[datumkey]['enddoy'] > 0:
startdoy = period.datumD[datumkey]['startdoy']
queryD['t.doy'] = {'val':startdoy, 'op':'>=' }
enddoy = period.datumD[datumkey]['enddoy']
queryD['#t.doy'] = {'val':enddoy, 'op':'<=' }
if params.orbitdirection.upper() != 'B':
raise NotImplementedError('filtering on orbit direction is not implemented')
wherestr = self._DictToSelect(queryD)
query = "SELECT DISTINCT ON (m.uuid) m.uuid, t.tileid, t.source, t.product, t.folder, t.acqdate, t.orbitid, t.utm, t.mgrsid, t.mgrs \
FROM sentinel.tilemeta AS M \
INNER JOIN sentinel.tiles AS T ON (M.tileid = T.tileid) "
if 'r.regionid' in statusD:
query += "INNER JOIN sentinel.regions AS R ON (T.mgrs = R.mgrs) "
query += wherestr
self.cursor.execute(query)
return self.cursor.fetchall()
def _SelectSentinelTemplate(self,queryD,paramL):
#return self._SingleSearch(queryD,'modis','template',paramL)
return self._MultiSearch(queryD,paramL,'sentinel','template')
def _InsertLayer(self,layer, overwrite = False, delete = False):
InsertLayer(self,layer,overwrite, delete)
def _InsertTileCoords(self,query):
'''
#rec = self._SingleSearch(query,'sentinel', 'vectorsearches')
'''
self.cursor.execute("SELECT * FROM sentinel.tilecoords WHERE mgrs = '%(mgrs)s';" %query)
record = self.cursor.fetchone()
if record == None:
self._InsertRecord(query, 'sentinel', 'tilecoords')
else:
search = {'mgrs':query['mgrs']}
query.pop('mgrs')
self._UpdateRecord(query, 'sentinel', 'tilecoords', search)
def _SelectSentinelTile(self,query):
self.cursor.execute("SELECT * FROM sentinel.tilecoords WHERE mgrs = '%(mgrs)s';" %query)
return self.cursor.fetchone()
def _SelectSentinelTileCoords(self, searchD):
#construct where statement - LATER
query = {}
self.cursor.execute("SELECT epsg, mgrs,utmzone,mgrsid,minx,miny,maxx,maxy,ullat,ullon,lrlat,lrlon,urlat,urlon,lllat,lllon FROM sentinel.tilecoords;" %query)
records = self.cursor.fetchall()
return records
def _SelectAllDefRegionsOld(self,wherestatement):
print ('wherestatement',wherestatement)
return SelectAllDefRegions(self,'sentinel','regions',wherestatement)
def _SelectAllDefRegions(self, wherestatement = '' ):
query = {'schema': 'sentinel', 'table':'regions', 'where':wherestatement}
if wherestatement == '':
self.cursor.execute("SELECT regioncat, regionid FROM system.defregions;" %query)
else:
#print ("SELECT DISTINCT R.regioncat, R.regionid FROM system.defregions R LEFT JOIN %(schema)s.%(table)s M ON (R.regionid = M.regionid) WHERE %(where)s;" %query)
print ("SELECT DISTINCT R.regioncat, R.regionid FROM system.defregions R LEFT JOIN %(schema)s.%(table)s M ON (R.regionid = M.regionid) %(where)s;" %query)
self.cursor.execute("SELECT DISTINCT R.regioncat, R.regionid FROM system.defregions R LEFT JOIN %(schema)s.%(table)s M ON (R.regionid = M.regionid) %(where)s;" %query)
return self.cursor.fetchall()
#return SelectAllDefRegions(self,'modis','regions',wherestatement)
def _InsertMGRSCoords(self,query):
#rec = self._SingleSearch(query,'sentinel', 'vectorsearches')
self.cursor.execute("SELECT * FROM sentinel.mgrscoords WHERE mgrs = '%(mgrs)s';" %query)
record = self.cursor.fetchone()
if record == None:
self._InsertRecord(query, 'sentinel', 'mgrscoords')
else:
search = {'mgrs':query['mgrs']}
query.pop('mgrs')
self._UpdateRecord(query, 'sentinel', 'mgrscoords', search)
def _InsertSentinelTileCoordOld(self,hvtile,h,v,ulxsin,ulysin,lrxsin,lrysin,ullat,ullon,lrlon,lrlat,urlon,urlat,lllon,lllat):
query = {'hvtile':hvtile}
#source, product, folder, band, prefix, suffix, fileext, celltype, dataunit, compid, hdfgrid, hdffolder, scalefactor, offsetadd, cellnull, retrieve, ecode
self.cursor.execute("SELECT * FROM sentinel.tilecoords WHERE hvtile = '%(hvtile)s';" %query)
record = self.cursor.fetchone()
if record == None:
self.cursor.execute("INSERT INTO sentinel.tilecoords (hvtile,h,v,minxsin,maxysin,maxxsin,minysin,ullat,ullon,lrlon,lrlat,urlon,urlat,lllon,lllat) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) ;",
(hvtile,h,v,ulxsin,ulysin,lrxsin,lrysin,ullat,ullon,lrlon,lrlat,urlon,urlat,lllon,lllat))
self.conn.commit()
def _SearchMGRSfromCentroid(self,lon,lat):
query = {'lon':lon, 'lat':lat}
self.cursor.execute("SELECT mgrs,west,south,east,north,ullon,ullat,urlon,urlat,lrlon,lrlat,lllon,lllat FROM sentinel.tilecoords WHERE %(lon)s > west AND %(lon)s < east AND %(lat)s > south and %(lat)s < north;" %query)
records = self.cursor.fetchall()
return records
def _SearchTilesFromWSEN(self, west, south, east, north):
query = {'west':west, 'south':south,'east':east,'north':north}
#self.cursor.execute("SELECT mgrs,west,south,east,north,ullon,ullat,urlon,urlat,lrlon,lrlat,lllon,lllat, minx, miny, maxx, maxy FROM sentinel.tilecoords WHERE centerlon > %(west)s AND centerlon < %(east)s AND centerlat > %(south)s AND centerlat < %(north)s;" %query)
self.cursor.execute("SELECT mgrs,west,south,east,north,ullon,ullat,urlon,urlat,lrlon,lrlat,lllon,lllat, minx, miny, maxx, maxy FROM sentinel.tilecoords WHERE east > %(west)s AND west < %(east)s AND north > %(south)s AND south < %(north)s;" %query)
#self.cursor.execute("SELECT epsg, mgrs,utmzone,mgrsid,minx,miny,maxx,maxy,ullat,ullon,lrlat,lrlon,urlat,urlon,lllat,lllon FROM sentinel.tilecoords;" %query)
records = self.cursor.fetchall()
return records
def _SearchMGRSFromWSEN(self, west, south, east, north, sentinel):
query = {'west':west, 'south':south,'east':east,'north':north,'sentinel':sentinel}
if sentinel:
self.cursor.execute("SELECT mgrs,west,south,east,north FROM sentinel.mgrscoords WHERE east > %(west)s AND west < %(east)s AND north > %(south)s AND south < %(north)s AND sentinel = '%(sentinel)s';" %query)
else:
self.cursor.execute("SELECT mgrs,west,south,east,north FROM sentinel.mgrscoords WHERE east > %(west)s AND west < %(east)s AND north > %(south)s AND south < %(north)s;" %query)
records = self.cursor.fetchall()
return records
def _InsertGranuleTiles(self, granuleid, tileD):
query = {'granuleid':granuleid}
self.cursor.execute("SELECT mgrs FROM sentinel.granuletiles WHERE granuleid = '%(granuleid)s';" %query)
records = self.cursor.fetchall()
mgrsL = [item[0] for item in records]
for tile in tileD:
if tile not in mgrsL:
query['tile'] = tile
query['overlap'] = tileD[tile]
self.cursor.execute("INSERT INTO sentinel.granuletiles (granuleid, mgrs, overlap) VALUES ('%(granuleid)s', '%(tile)s', %(overlap)s)" %query)
self.conn.commit()
def _SelectGranuleTiles(self, granuleid,overlap):
#query = {'granuleid':granuleid}
query = {'granuleid':granuleid,'overlap':overlap}
self.cursor.execute("SELECT mgrs FROM sentinel.granuletiles WHERE granuleid = '%(granuleid)s' and overlap >= %(overlap)s;" %query)
records = self.cursor.fetchall()
mgrsL = [item[0] for item in records]
return mgrsL
def _GetGranuleMeta(self, granuleid):
query = {'granuleid':granuleid}
#print ("SELECT product, proclevel, orbitnr, orbitdir, cloudcover, sensopmode, s2datatakeid, procbase, platformid, platformname, instrument \
#FROM sentinel.granulemeta WHERE granuleid = '%(granuleid)s';" %query)
self.cursor.execute("SELECT product, proclevel, orbitnr, orbitdir, cloudcover, sensopmode, s2datatakeid, procbase, platformid, platformname, instrument \
FROM sentinel.granulemeta WHERE granuleid = '%(granuleid)s';" %query)
record = self.cursor.fetchone()
return record
def _GetGranuleTile(self, granuleid):
query = {'granuleid':granuleid}
#print ("SELECT orbitid, acqdate, acqtime, sunazimuth, sunelevation, doy, source, product, folder, filetype, filename, downloaded, organized, exploded, deleted, declouded, maskstatus, metacheck, tgnote \
#FROM sentinel.granules WHERE granuleid = '%(granuleid)s';" %query)
self.cursor.execute("SELECT orbitid, acqdate, acqtime, sunazimuth, sunelevation, doy, source, product, folder, filetype, filename, downloaded, organized, exploded, deleted, declouded, maskstatus, metacheck, tgnote \
FROM sentinel.granules WHERE granuleid = '%(granuleid)s';" %query)
record = self.cursor.fetchone()
return record
def _SelectMGRS(self,mgrs):
query = {'mgrs':mgrs}
self.cursor.execute("SELECT utmzone,mgrsid,proj4,minx,miny,maxx,maxy,refsize,refcols,reflins FROM sentinel.tilecoords WHERE mgrs = '%(mgrs)s'" %query)
record = self.cursor.fetchone()
if record == None:
print ("SELECT utmzone,mgrsid,proj4,minx,miny,maxx,maxy,refsize,refcols,reflins FROM sentinel.tilecoords WHERE mgrs = '%(mgrs)s'" %query)
return record
def _InsertSingleSentinelRegion(self,queryD):
'''
'''
tabkeys = (['regionid'],['mgrs'])
#regionid,mgrs
self._CheckInsertSingleRecord(queryD, 'sentinel', 'regions', tabkeys)
def _SelectRegionTiles(self,queryD):
'''
'''
#print ("SELECT mgrs, utm, mgrsid FROM sentinel.regions WHERE regionid = '%(regionid)s' and regiontype = '%(regiontype)s';" %queryD)
self.cursor.execute("SELECT mgrs, utm, mgrsid FROM sentinel.regions WHERE regionid = '%(regionid)s' and regiontype = '%(regiontype)s';" %queryD)
records = self.cursor.fetchall()
return (records)
def _SelectUniqueRegionTiles(self,queryD):
'''
'''
#print ("SELECT mgrs, utm, mgrsid FROM sentinel.regions WHERE regionid = '%(regionid)s' and regiontype = '%(regiontype)s';" %queryD)
self.cursor.execute("SELECT mgrs, utm, mgrsid FROM sentinel.regions WHERE regionid = '%(regionid)s' and regiontype = '%(regiontype)s';" %queryD)
records = self.cursor.fetchall()
return (records)
def _SelectSentineRegions(self,queryD):
'''
'''
self.cursor.execute("SELECT DISTINCT ON (regionid) regionid FROM sentinel.regions WHERE regiontype = '%(regiontype)s';" %queryD)
records = self.cursor.fetchall()
regionidL = [item[0] for item in records]
return regionidL
| 51.20442 | 274 | 0.631636 |
06234792dbd19d49bed18234cae1df56af3d351a | 778 | py | Python | ctapipe/flow/algorithms/string_writer.py | mpecimotika/ctapipe | ffd7930921f7139b761fbf1208da16dd302e97a6 | ["BSD-3-Clause"] | null | null | null | ctapipe/flow/algorithms/string_writer.py | mpecimotika/ctapipe | ffd7930921f7139b761fbf1208da16dd302e97a6 | ["BSD-3-Clause"] | null | null | null | ctapipe/flow/algorithms/string_writer.py | mpecimotika/ctapipe | ffd7930921f7139b761fbf1208da16dd302e97a6 | ["BSD-3-Clause"] | null | null | null |
from ctapipe.core import Component
from traitlets import Unicode
from time import sleep
class StringWriter(Component):
"""`StringWriter` class represents a Stage or a Consumer for pipeline.
It writes received objects to file
"""
filename = Unicode('/tmp/test.txt', help='output filename').tag(
config=True, allow_none=True)
def init(self):
self.file = open(self.filename, 'w')
self.log.debug("--- StringWriter init filename {}---".format(self.filename))
return True
def run(self, object):
self.file.write(str(object) + "\n")
sleep(.5)
self.log.debug('StringWriter write {}'.format(object))
def finish(self):
self.file.close()
self.log.debug("--- StringWriter finish---")
| 28.814815 | 84 | 0.631105 |
5c440bd6d9fb2caa55918edcb3dc8e77f89558eb | 781 | py | Python | tests/sliding_window_throttler_test.py | berknology/api-throttler | 5f0c5687db3687858f84e24e232b53286e6adfcf | ["BSD-2-Clause"] | 1 | 2021-01-01T08:22:54.000Z | 2021-01-01T08:22:54.000Z | tests/sliding_window_throttler_test.py | berknology/api-throttler | 5f0c5687db3687858f84e24e232b53286e6adfcf | ["BSD-2-Clause"] | null | null | null | tests/sliding_window_throttler_test.py | berknology/api-throttler | 5f0c5687db3687858f84e24e232b53286e6adfcf | ["BSD-2-Clause"] | null | null | null |
import time
from unittest import TestCase
from api_throttler import SlidingWindowThrottler
class TestSlidingWindowThrottler(TestCase):
def setUp(self) -> None:
self.throttler = SlidingWindowThrottler(2, 5)
def test_continuous_calls(self):
allowed_calls = 0
for i in range(5):
if not self.throttler.is_throttled(key="test_key"):
allowed_calls += 1
time.sleep(1)
self.assertEqual(allowed_calls, 2)
def test_periodic_calls(self):
allowed_calls = 0
for i in range(10):
if i in {0, 4, 5, 6}:
if not self.throttler.is_throttled(key="test_key"):
allowed_calls += 1
time.sleep(1)
self.assertEqual(allowed_calls, 3)
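# Illustrative sketch of the sliding-window idea the tests above exercise; this is not the
# api_throttler implementation. Per key, keep recent call timestamps and throttle once the
# current window already holds the allowed number of calls.
import time
from collections import defaultdict, deque
class SlidingWindowSketch:
    def __init__(self, max_calls, window_seconds):
        self.max_calls = max_calls
        self.window = window_seconds
        self._calls = defaultdict(deque)
    def is_throttled(self, key):
        now = time.monotonic()
        calls = self._calls[key]
        while calls and now - calls[0] > self.window:
            calls.popleft()  # drop timestamps that fell out of the window
        if len(calls) >= self.max_calls:
            return True  # limit reached within the window: throttle
        calls.append(now)
        return False  # allowed: record this call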
| 27.892857 | 67 | 0.610755 |
0c5a99c4f336233f2c68290fadd33937eac2d44d | 20,246 | py | Python | facebook_business/adobjects/adcreative.py | ioluwayo/facebook-python-business-sdk | c085e1efb0a0d06d6abdf4bc4c0e262f9b478ad9 | ["CNRI-Python"] | null | null | null | facebook_business/adobjects/adcreative.py | ioluwayo/facebook-python-business-sdk | c085e1efb0a0d06d6abdf4bc4c0e262f9b478ad9 | ["CNRI-Python"] | null | null | null | facebook_business/adobjects/adcreative.py | ioluwayo/facebook-python-business-sdk | c085e1efb0a0d06d6abdf4bc4c0e262f9b478ad9 | ["CNRI-Python"] | null | null | null |
# Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from facebook_business.adobjects.abstractobject import AbstractObject
from facebook_business.adobjects.abstractcrudobject import AbstractCrudObject
from facebook_business.adobjects.objectparser import ObjectParser
from facebook_business.api import FacebookRequest
from facebook_business.typechecker import TypeChecker
from facebook_business.mixins import HasAdLabels
"""
This class is auto-generated.
For any issues or feature requests related to this class, please let us know on
github and we'll fix it in our codegen framework. We'll not be able to accept
pull requests for this class.
"""
class AdCreative(
AbstractCrudObject,
HasAdLabels,
):
def __init__(self, fbid=None, parent_id=None, api=None):
self._isAdCreative = True
super(AdCreative, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
account_id = 'account_id'
actor_id = 'actor_id'
adlabels = 'adlabels'
applink_treatment = 'applink_treatment'
asset_feed_spec = 'asset_feed_spec'
authorization_category = 'authorization_category'
auto_update = 'auto_update'
body = 'body'
branded_content_sponsor_page_id = 'branded_content_sponsor_page_id'
bundle_folder_id = 'bundle_folder_id'
call_to_action_type = 'call_to_action_type'
categorization_criteria = 'categorization_criteria'
category_media_source = 'category_media_source'
destination_set_id = 'destination_set_id'
dynamic_ad_voice = 'dynamic_ad_voice'
effective_authorization_category = 'effective_authorization_category'
effective_instagram_media_id = 'effective_instagram_media_id'
effective_instagram_story_id = 'effective_instagram_story_id'
effective_object_story_id = 'effective_object_story_id'
enable_direct_install = 'enable_direct_install'
enable_launch_instant_app = 'enable_launch_instant_app'
id = 'id'
image_crops = 'image_crops'
image_hash = 'image_hash'
image_url = 'image_url'
instagram_actor_id = 'instagram_actor_id'
instagram_permalink_url = 'instagram_permalink_url'
instagram_story_id = 'instagram_story_id'
interactive_components_spec = 'interactive_components_spec'
link_deep_link_url = 'link_deep_link_url'
link_destination_display_url = 'link_destination_display_url'
link_og_id = 'link_og_id'
link_url = 'link_url'
messenger_sponsored_message = 'messenger_sponsored_message'
name = 'name'
object_id = 'object_id'
object_store_url = 'object_store_url'
object_story_id = 'object_story_id'
object_story_spec = 'object_story_spec'
object_type = 'object_type'
object_url = 'object_url'
place_page_set_id = 'place_page_set_id'
platform_customizations = 'platform_customizations'
playable_asset_id = 'playable_asset_id'
portrait_customizations = 'portrait_customizations'
product_set_id = 'product_set_id'
recommender_settings = 'recommender_settings'
status = 'status'
template_url = 'template_url'
template_url_spec = 'template_url_spec'
thumbnail_url = 'thumbnail_url'
title = 'title'
url_tags = 'url_tags'
use_page_actor_override = 'use_page_actor_override'
video_id = 'video_id'
call_to_action = 'call_to_action'
image_file = 'image_file'
instant_checkout_setting = 'instant_checkout_setting'
is_dco_internal = 'is_dco_internal'
class ApplinkTreatment:
deeplink_with_appstore_fallback = 'deeplink_with_appstore_fallback'
deeplink_with_web_fallback = 'deeplink_with_web_fallback'
web_only = 'web_only'
class CallToActionType:
add_to_cart = 'ADD_TO_CART'
apply_now = 'APPLY_NOW'
book_travel = 'BOOK_TRAVEL'
buy = 'BUY'
buy_now = 'BUY_NOW'
buy_tickets = 'BUY_TICKETS'
call = 'CALL'
call_me = 'CALL_ME'
contact = 'CONTACT'
contact_us = 'CONTACT_US'
donate = 'DONATE'
donate_now = 'DONATE_NOW'
download = 'DOWNLOAD'
event_rsvp = 'EVENT_RSVP'
find_a_group = 'FIND_A_GROUP'
find_your_groups = 'FIND_YOUR_GROUPS'
follow_news_storyline = 'FOLLOW_NEWS_STORYLINE'
follow_page = 'FOLLOW_PAGE'
follow_user = 'FOLLOW_USER'
get_directions = 'GET_DIRECTIONS'
get_offer = 'GET_OFFER'
get_offer_view = 'GET_OFFER_VIEW'
get_quote = 'GET_QUOTE'
get_showtimes = 'GET_SHOWTIMES'
install_app = 'INSTALL_APP'
install_mobile_app = 'INSTALL_MOBILE_APP'
learn_more = 'LEARN_MORE'
like_page = 'LIKE_PAGE'
listen_music = 'LISTEN_MUSIC'
listen_now = 'LISTEN_NOW'
message_page = 'MESSAGE_PAGE'
mobile_download = 'MOBILE_DOWNLOAD'
moments = 'MOMENTS'
no_button = 'NO_BUTTON'
open_link = 'OPEN_LINK'
order_now = 'ORDER_NOW'
pay_to_access = 'PAY_TO_ACCESS'
play_game = 'PLAY_GAME'
purchase_gift_cards = 'PURCHASE_GIFT_CARDS'
record_now = 'RECORD_NOW'
refer_friends = 'REFER_FRIENDS'
request_time = 'REQUEST_TIME'
say_thanks = 'SAY_THANKS'
see_more = 'SEE_MORE'
sell_now = 'SELL_NOW'
send_a_gift = 'SEND_A_GIFT'
share = 'SHARE'
shop_now = 'SHOP_NOW'
sign_up = 'SIGN_UP'
sotto_subscribe = 'SOTTO_SUBSCRIBE'
subscribe = 'SUBSCRIBE'
swipe_up_product = 'SWIPE_UP_PRODUCT'
swipe_up_shop = 'SWIPE_UP_SHOP'
update_app = 'UPDATE_APP'
use_app = 'USE_APP'
use_mobile_app = 'USE_MOBILE_APP'
video_annotation = 'VIDEO_ANNOTATION'
visit_pages_feed = 'VISIT_PAGES_FEED'
watch_more = 'WATCH_MORE'
watch_video = 'WATCH_VIDEO'
whatsapp_message = 'WHATSAPP_MESSAGE'
woodhenge_support = 'WOODHENGE_SUPPORT'
class ObjectType:
application = 'APPLICATION'
domain = 'DOMAIN'
event = 'EVENT'
invalid = 'INVALID'
offer = 'OFFER'
page = 'PAGE'
photo = 'PHOTO'
post_deleted = 'POST_DELETED'
privacy_check_fail = 'PRIVACY_CHECK_FAIL'
share = 'SHARE'
status = 'STATUS'
store_item = 'STORE_ITEM'
video = 'VIDEO'
class Status:
active = 'ACTIVE'
deleted = 'DELETED'
in_process = 'IN_PROCESS'
with_issues = 'WITH_ISSUES'
class AuthorizationCategory:
none = 'NONE'
political = 'POLITICAL'
class CategorizationCriteria:
brand = 'brand'
category = 'category'
product_type = 'product_type'
class CategoryMediaSource:
category = 'CATEGORY'
mixed = 'MIXED'
products_collage = 'PRODUCTS_COLLAGE'
products_slideshow = 'PRODUCTS_SLIDESHOW'
class DynamicAdVoice:
dynamic = 'DYNAMIC'
story_owner = 'STORY_OWNER'
class InstantCheckoutSetting:
off = 'off'
on = 'on'
class Operator:
all = 'ALL'
any = 'ANY'
# @deprecated get_endpoint function is deprecated
@classmethod
def get_endpoint(cls):
return 'adcreatives'
# @deprecated api_create is being deprecated
def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.adobjects.adaccount import AdAccount
return AdAccount(api=self._api, fbid=parent_id).create_ad_creative(fields, params, batch, success, failure, pending)
def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'account_id': 'string',
'adlabels': 'list<Object>',
'name': 'string',
'status': 'status_enum',
}
enums = {
'status_enum': AdCreative.Status.__dict__.values(),
}
request = FacebookRequest(
node_id=self['id'],
method='DELETE',
endpoint='/',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AbstractCrudObject,
api_type='NODE',
response_parser=ObjectParser(reuse_object=self),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'thumbnail_height': 'unsigned int',
'thumbnail_width': 'unsigned int',
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdCreative,
api_type='NODE',
response_parser=ObjectParser(reuse_object=self),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'account_id': 'string',
'adlabels': 'list<Object>',
'name': 'string',
'status': 'status_enum',
}
enums = {
'status_enum': AdCreative.Status.__dict__.values(),
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdCreative,
api_type='NODE',
response_parser=ObjectParser(reuse_object=self),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def create_ad_label(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
param_types = {
'adlabels': 'list<Object>',
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='POST',
endpoint='/adlabels',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdCreative,
api_type='EDGE',
response_parser=ObjectParser(target_class=AdCreative, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_creative_insights(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.adcreativeinsights import AdCreativeInsights
param_types = {
}
enums = {
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/creative_insights',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdCreativeInsights,
api_type='EDGE',
response_parser=ObjectParser(target_class=AdCreativeInsights, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
def get_previews(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
from facebook_business.utils import api_utils
if batch is None and (success is not None or failure is not None):
api_utils.warning('`success` and `failure` callback only work for batch call.')
from facebook_business.adobjects.adpreview import AdPreview
param_types = {
'ad_format': 'ad_format_enum',
'dynamic_asset_label': 'string',
'dynamic_creative_spec': 'Object',
'dynamic_customization': 'Object',
'end_date': 'datetime',
'height': 'unsigned int',
'locale': 'string',
'place_page_id': 'int',
'post': 'Object',
'product_item_ids': 'list<string>',
'render_type': 'render_type_enum',
'start_date': 'datetime',
'width': 'unsigned int',
}
enums = {
'ad_format_enum': AdPreview.AdFormat.__dict__.values(),
'render_type_enum': AdPreview.RenderType.__dict__.values(),
}
request = FacebookRequest(
node_id=self['id'],
method='GET',
endpoint='/previews',
api=self._api,
param_checker=TypeChecker(param_types, enums),
target_class=AdPreview,
api_type='EDGE',
response_parser=ObjectParser(target_class=AdPreview, api=self._api),
)
request.add_params(params)
request.add_fields(fields)
if batch is not None:
request.add_to_batch(batch, success=success, failure=failure)
return request
elif pending:
return request
else:
self.assure_call()
return request.execute()
_field_types = {
'account_id': 'string',
'actor_id': 'string',
'adlabels': 'list<AdLabel>',
'applink_treatment': 'ApplinkTreatment',
'asset_feed_spec': 'AdAssetFeedSpec',
'authorization_category': 'string',
'auto_update': 'bool',
'body': 'string',
'branded_content_sponsor_page_id': 'string',
'bundle_folder_id': 'string',
'call_to_action_type': 'CallToActionType',
'categorization_criteria': 'string',
'category_media_source': 'string',
'destination_set_id': 'string',
'dynamic_ad_voice': 'string',
'effective_authorization_category': 'string',
'effective_instagram_media_id': 'string',
'effective_instagram_story_id': 'string',
'effective_object_story_id': 'string',
'enable_direct_install': 'bool',
'enable_launch_instant_app': 'bool',
'id': 'string',
'image_crops': 'AdsImageCrops',
'image_hash': 'string',
'image_url': 'string',
'instagram_actor_id': 'string',
'instagram_permalink_url': 'string',
'instagram_story_id': 'string',
'interactive_components_spec': 'AdCreativeInteractiveComponentsSpec',
'link_deep_link_url': 'string',
'link_destination_display_url': 'string',
'link_og_id': 'string',
'link_url': 'string',
'messenger_sponsored_message': 'string',
'name': 'string',
'object_id': 'string',
'object_store_url': 'string',
'object_story_id': 'string',
'object_story_spec': 'AdCreativeObjectStorySpec',
'object_type': 'ObjectType',
'object_url': 'string',
'place_page_set_id': 'string',
'platform_customizations': 'AdCreativePlatformCustomization',
'playable_asset_id': 'string',
'portrait_customizations': 'AdCreativePortraitCustomizations',
'product_set_id': 'string',
'recommender_settings': 'AdCreativeRecommenderSettings',
'status': 'Status',
'template_url': 'string',
'template_url_spec': 'AdCreativeTemplateURLSpec',
'thumbnail_url': 'string',
'title': 'string',
'url_tags': 'string',
'use_page_actor_override': 'bool',
'video_id': 'string',
'call_to_action': 'Object',
'image_file': 'string',
'instant_checkout_setting': 'InstantCheckoutSetting',
'is_dco_internal': 'bool',
}
@classmethod
def _get_field_enum_info(cls):
field_enum_info = {}
field_enum_info['ApplinkTreatment'] = AdCreative.ApplinkTreatment.__dict__.values()
field_enum_info['CallToActionType'] = AdCreative.CallToActionType.__dict__.values()
field_enum_info['ObjectType'] = AdCreative.ObjectType.__dict__.values()
field_enum_info['Status'] = AdCreative.Status.__dict__.values()
field_enum_info['AuthorizationCategory'] = AdCreative.AuthorizationCategory.__dict__.values()
field_enum_info['CategorizationCriteria'] = AdCreative.CategorizationCriteria.__dict__.values()
field_enum_info['CategoryMediaSource'] = AdCreative.CategoryMediaSource.__dict__.values()
field_enum_info['DynamicAdVoice'] = AdCreative.DynamicAdVoice.__dict__.values()
field_enum_info['InstantCheckoutSetting'] = AdCreative.InstantCheckoutSetting.__dict__.values()
field_enum_info['Operator'] = AdCreative.Operator.__dict__.values()
return field_enum_info
def _setitem_trigger(self, key, value):
if key == 'id':
self._data['creative_id'] = self['id']
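# Minimal read sketch for the class above (the access token and creative id are
# placeholders, and the requested fields are illustrative).
from facebook_business.api import FacebookAdsApi
from facebook_business.adobjects.adcreative import AdCreative
FacebookAdsApi.init(access_token="<ACCESS_TOKEN>")
creative = AdCreative("<CREATIVE_ID>")
print(creative.api_get(fields=[AdCreative.Field.name, AdCreative.Field.object_story_id]))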
| 38.711281 | 124 | 0.637311 |
6ca39a729998438d0eaac0fab26dfb22c9706d00 | 1,764 | py | Python | 4th-semester/aisd/lab/lista-4/ex-1-hash-table-chained-tests.py | jerry-sky/academic-notebook | be2d350289441b99168ea40412891bc65b9cb431 | ["Unlicense"] | 4 | 2020-12-28T21:53:00.000Z | 2022-03-22T19:24:47.000Z | 4th-semester/aisd/lab/lista-4/ex-1-hash-table-chained-tests.py | jerry-sky/academic-notebook | be2d350289441b99168ea40412891bc65b9cb431 | ["Unlicense"] | 3 | 2022-02-13T18:07:10.000Z | 2022-02-13T18:16:07.000Z | 4th-semester/aisd/lab/lista-4/ex-1-hash-table-chained-tests.py | jerry-sky/academic-notebook | be2d350289441b99168ea40412891bc65b9cb431 | ["Unlicense"] | 4 | 2020-12-28T16:05:35.000Z | 2022-03-08T16:20:00.000Z |
#!/usr/bin/env python3
from rbt import RBT
from llist import LList
from time import time
from random import randint
from sys import argv, exit
if __name__ == "__main__":
if len(argv) < 2:
exit('usage ./ex-1-hash-table-chained-tests.py <items quantity>')
# initialize both structures
llist = LList()
rbt = RBT()
# add this quantity of items
items_count = int(argv[1])
# measure insertion times
llist_insert_times = []
rbt_insert_times = []
for i in range(items_count):
# t = i
t = randint(0, items_count)
# measure insertion time of a linked list
begin = time()
llist.insert(t)
end = time()
llist_insert_times.append(end-begin)
# measure insertion time of a RB-Tree
begin = time()
rbt.insert(t)
end = time()
rbt_insert_times.append(end-begin)
# calculate the average insertion time
llist_insert_times_avg = sum(llist_insert_times)/len(llist_insert_times)
rbt_insert_times_avg = sum(rbt_insert_times)/len(rbt_insert_times)
# measure searching time
t = randint(0, items_count)
begin = time()
x = llist.find(t)
end = time()
llist_find_time = end - begin
begin = time()
x = rbt.find(t)
end = time()
rbt_find_time = end - begin
print('llist insert avg:', llist_insert_times_avg)
print('rbt insert avg :', rbt_insert_times_avg)
print('avg insert rate :', rbt_insert_times_avg/llist_insert_times_avg)
print('rbt faster :', rbt_insert_times_avg < llist_insert_times_avg)
print()
print('llist find time :', llist_find_time)
print('rbt find time :', rbt_find_time)
print('rbt faster :', rbt_find_time < llist_find_time)
| 26.727273 | 77 | 0.646825 |
bb72f9435fdc29920b70ac72d5ae0238e0aa1869 | 1,351 | py | Python | oandapyV20-examples-master/src/console/greenlets/accountdetails.py | cdibble2011/OANDA | 68327d6d65dd92952d7a1dc49fe29efca766d900 | ["MIT"] | 127 | 2017-02-28T17:34:14.000Z | 2022-01-21T13:14:30.000Z | oandapyV20-examples-master/src/console/greenlets/accountdetails.py | cdibble2011/OANDA | 68327d6d65dd92952d7a1dc49fe29efca766d900 | ["MIT"] | 36 | 2018-06-07T21:34:13.000Z | 2022-03-13T21:01:43.000Z | oandapyV20-examples-master/src/console/greenlets/accountdetails.py | cdibble2011/OANDA | 68327d6d65dd92952d7a1dc49fe29efca766d900 | ["MIT"] | 76 | 2017-01-02T14:15:07.000Z | 2022-03-28T03:49:45.000Z |
# -*- coding: utf-8 -*-
import gevent
from oandapyV20.endpoints.accounts import AccountDetails, AccountChanges
class GAccountDetails(gevent.Greenlet):
"""Greenlet to handle account details/changes.
Initially get the AccountDetails and then keep polling
for account changes.
In case of changes put those on the NAV-Queue
"""
def __init__(self, api, accountID, queue, sleepTime=4):
super(GAccountDetails, self).__init__()
self.api = api
self.accountID = accountID
self.queue = queue
self.sleepTime = sleepTime
def _run(self):
# setup the summary request
r = AccountDetails(accountID=self.accountID)
rv = self.api.request(r)
lastTransactionID = rv.get("lastTransactionID")
lastLastTransactionID = lastTransactionID
r = None
while True:
if not r or lastLastTransactionID != lastTransactionID:
params = {"sinceTransactionID":
int(rv.get("lastTransactionID"))}
r = AccountChanges(accountID=self.accountID, params=params)
lastLastTransactionID = lastTransactionID
rv = self.api.request(r)
lastTransactionID = rv.get('lastTransactionID')
self.queue.put_nowait(rv)
gevent.sleep(self.sleepTime)
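# Wiring sketch for the greenlet above (the account id and token are placeholders; assumes
# oandapyV20's API client and a gevent queue as the NAV queue).
from gevent.queue import Queue
from oandapyV20 import API
api = API(access_token="<TOKEN>")
nav_queue = Queue()
monitor = GAccountDetails(api, accountID="<ACCOUNT_ID>", queue=nav_queue, sleepTime=4)
monitor.start()
# elsewhere: consume account-change events with nav_queue.get()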
| 32.95122 | 75 | 0.637306 |
52c6e2845176af3963ddcc61c67668a2d8a19e46 | 2,438 | py | Python | psi_apps/rook_services/views.py | TwoRavens/PSI | fd5fdbfc84cba1a5916a0714ddd733b05ee779d2 | ["Apache-2.0"] | 4 | 2019-07-21T04:46:41.000Z | 2021-05-27T13:29:50.000Z | psi_apps/rook_services/views.py | TwoRavens/PSI | fd5fdbfc84cba1a5916a0714ddd733b05ee779d2 | ["Apache-2.0"] | 37 | 2018-07-12T13:51:00.000Z | 2019-02-07T20:32:50.000Z | psi_apps/rook_services/views.py | TwoRavens/PSI | fd5fdbfc84cba1a5916a0714ddd733b05ee779d2 | ["Apache-2.0"] | 2 | 2018-07-16T20:01:22.000Z | 2018-12-06T22:53:55.000Z |
import json
import requests
import urllib.parse
from django.http import JsonResponse, HttpResponse, Http404
from django.contrib.auth.decorators import login_required
#from psiproject.settings.local import ROOK_SVC_URL
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from psi_apps.utils.view_helper import \
(get_json_error, get_json_success)
@login_required(login_url='login')
def view_rook_route(request, app_name_in_url):
"""Route the call to Rook and back"""
rook_svc_url = '{0}{1}'.format(settings.ROOK_SERVER_BASE, app_name_in_url)
decode = request.body.decode('utf-8')
data_payload = {'tableJSON': decode}
print('rook_svc_url', rook_svc_url)
try:
rservice_req = requests.post(rook_svc_url, data=data_payload)
except requests.exceptions.ConnectionError:
user_msg = 'R Server not responding: %s' % rook_svc_url
print('err_msg', user_msg)
return JsonResponse(get_json_error(user_msg))
print('status code from rook call: %d' % rservice_req.status_code)
print('rservice_req text', rservice_req.text)
try:
json_info = rservice_req.json()
except json.decoder.JSONDecodeError as ex_obj:
user_msg = 'rook response is not JSON. Error: %s' % ex_obj
return JsonResponse(get_json_error(user_msg))
# current
#
#if app_name_in_url not in ['privateStatisticsapp']:
# return JsonResponse(json_info)
# preferable, need UI to handle
#
user_resp = get_json_success('success',
data=json_info)
return JsonResponse(user_resp)
@login_required(login_url='login')
def download_rook_route(request, filename):
"""Route the call to Rook and back"""
print('filename', filename)
rook_svc_url = '{0}{1}{2}'.format(settings.ROOK_SERVER_BASE, 'rook-files/', filename)
print('rook_svc_url download', rook_svc_url)
try:
rservice_req = requests.get(rook_svc_url)
except requests.exceptions.ConnectionError:
user_msg = 'R Server not responding: %s' % rook_svc_url
print('err_msg', user_msg)
return JsonResponse(get_json_error(user_msg))
print('status code from rook call: %d' % rservice_req.status_code)
# print('rservice_req content', rservice_req.content)
return HttpResponse(
rservice_req.content,
status=rservice_req.status_code,
content_type=rservice_req.headers['Content-Type'])
| 30.475 | 89 | 0.705086 |
fa784858361414f36cfc834ad315624191e5ffdb | 169 | py | Python | WEEKS/CD_Sata-Structures/_RESOURCES/Python-Practice-Problems-master/temp/prac6.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | ["MIT"] | null | null | null | WEEKS/CD_Sata-Structures/_RESOURCES/Python-Practice-Problems-master/temp/prac6.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | ["MIT"] | null | null | null | WEEKS/CD_Sata-Structures/_RESOURCES/Python-Practice-Problems-master/temp/prac6.py | webdevhub42/Lambda | b04b84fb5b82fe7c8b12680149e25ae0d27a0960 | ["MIT"] | null | null | null |
# Problem 6
# Ask the user for a string and print out whether this string is a palindrome or not. (A palindrome is a string that reads the same forwards and backwards.)
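# One possible solution sketch for the prompt above (the input prompt wording is illustrative):
text = input("Enter a string: ")
normalized = text.casefold()
print("palindrome" if normalized == normalized[::-1] else "not a palindrome")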
| 56.333333 | 156 | 0.769231 |
47420dd54a987a8969c31df715f0dd577800c6b1 | 33,036 | py | Python | model/unet.py | EuphoriaYan/zi2zi | 74375b45d74dfcd17fc645d2baa2e2bb470e9eb1 | ["Apache-2.0"] | null | null | null | model/unet.py | EuphoriaYan/zi2zi | 74375b45d74dfcd17fc645d2baa2e2bb470e9eb1 | ["Apache-2.0"] | null | null | null | model/unet.py | EuphoriaYan/zi2zi | 74375b45d74dfcd17fc645d2baa2e2bb470e9eb1 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import tensorflow.compat.v1 as tf
import numpy as np
import imageio
import os
import time
from collections import namedtuple
from .ops import conv2d, deconv2d, lrelu, fc, batch_norm, init_embedding, conditional_instance_norm
from .dataset import TrainDataProvider, InjectDataProvider, NeverEndingLoopingProvider
from .utils import scale_back, merge, save_concat_images
# Auxiliary wrapper classes
# Used to save handles(important nodes in computation graph) for later evaluation
LossHandle = namedtuple("LossHandle", ["d_loss", "g_loss", "const_loss", "l1_loss",
"category_loss", "cheat_loss", "tv_loss"])
InputHandle = namedtuple("InputHandle", ["real_data", "embedding_ids", "no_target_data", "no_target_ids"])
EvalHandle = namedtuple("EvalHandle", ["encoder", "generator", "target", "source", "embedding"])
SummaryHandle = namedtuple("SummaryHandle", ["d_merged", "g_merged"])
class UNet(object):
def __init__(self, experiment_dir=None, experiment_id=0, batch_size=16, input_width=256, output_width=256,
generator_dim=64, discriminator_dim=64, L1_penalty=100, Lconst_penalty=15, Ltv_penalty=0.0,
Lcategory_penalty=1.0, embedding_num=40, embedding_dim=128, input_filters=3, output_filters=3):
self.experiment_dir = experiment_dir
self.experiment_id = experiment_id
self.batch_size = batch_size
self.input_width = input_width
self.output_width = output_width
self.generator_dim = generator_dim
self.discriminator_dim = discriminator_dim
self.L1_penalty = L1_penalty
self.Lconst_penalty = Lconst_penalty
self.Ltv_penalty = Ltv_penalty
self.Lcategory_penalty = Lcategory_penalty
self.embedding_num = embedding_num
self.embedding_dim = embedding_dim
self.input_filters = input_filters
self.output_filters = output_filters
# init all the directories
self.sess = None
# experiment_dir is needed for training
if experiment_dir:
self.data_dir = os.path.join(self.experiment_dir, "data")
self.checkpoint_dir = os.path.join(self.experiment_dir, "checkpoint")
self.sample_dir = os.path.join(self.experiment_dir, "sample")
self.log_dir = os.path.join(self.experiment_dir, "logs")
if not os.path.exists(self.checkpoint_dir):
os.makedirs(self.checkpoint_dir)
print("create checkpoint directory")
if not os.path.exists(self.log_dir):
os.makedirs(self.log_dir)
print("create log directory")
if not os.path.exists(self.sample_dir):
os.makedirs(self.sample_dir)
print("create sample directory")
def encoder(self, images, is_training, reuse=False):
with tf.variable_scope("generator"):
if reuse:
tf.get_variable_scope().reuse_variables()
encode_layers = dict()
def encode_layer(x, output_filters, layer):
act = lrelu(x)
conv = conv2d(act, output_filters=output_filters, scope="g_e%d_conv" % layer)
enc = batch_norm(conv, is_training, scope="g_e%d_bn" % layer)
encode_layers["e%d" % layer] = enc
return enc
e1 = conv2d(images, self.generator_dim, scope="g_e1_conv")
encode_layers["e1"] = e1
e2 = encode_layer(e1, self.generator_dim * 2, 2)
e3 = encode_layer(e2, self.generator_dim * 4, 3)
e4 = encode_layer(e3, self.generator_dim * 8, 4)
e5 = encode_layer(e4, self.generator_dim * 8, 5)
e6 = encode_layer(e5, self.generator_dim * 8, 6)
e7 = encode_layer(e6, self.generator_dim * 8, 7)
e8 = encode_layer(e7, self.generator_dim * 8, 8)
return e8, encode_layers
def decoder(self, encoded, encoding_layers, ids, inst_norm, is_training, reuse=False):
with tf.variable_scope("generator"):
if reuse:
tf.get_variable_scope().reuse_variables()
s = self.output_width
s2, s4, s8, s16, s32, s64, s128 = int(s / 2), int(s / 4), int(s / 8), int(s / 16), int(s / 32), int(
s / 64), int(s / 128)
def decode_layer(x, output_width, output_filters, layer, enc_layer, dropout=False, do_concat=True):
dec = deconv2d(tf.nn.relu(x), [self.batch_size, output_width,
output_width, output_filters], scope="g_d%d_deconv" % layer)
if layer != 8:
# IMPORTANT: normalization for last layer
# Very important, otherwise GAN is unstable
# Trying conditional instance normalization to
# overcome the fact that batch normalization offers
# different train/test statistics
if inst_norm:
dec = conditional_instance_norm(dec, ids, self.embedding_num, scope="g_d%d_inst_norm" % layer)
else:
dec = batch_norm(dec, is_training, scope="g_d%d_bn" % layer)
if dropout:
dec = tf.nn.dropout(dec, rate=0.5)
if do_concat:
dec = tf.concat([dec, enc_layer], 3)
return dec
d1 = decode_layer(encoded, s128, self.generator_dim * 8, layer=1, enc_layer=encoding_layers["e7"],
dropout=True)
d2 = decode_layer(d1, s64, self.generator_dim * 8, layer=2, enc_layer=encoding_layers["e6"], dropout=True)
d3 = decode_layer(d2, s32, self.generator_dim * 8, layer=3, enc_layer=encoding_layers["e5"], dropout=True)
d4 = decode_layer(d3, s16, self.generator_dim * 8, layer=4, enc_layer=encoding_layers["e4"])
d5 = decode_layer(d4, s8, self.generator_dim * 4, layer=5, enc_layer=encoding_layers["e3"])
d6 = decode_layer(d5, s4, self.generator_dim * 2, layer=6, enc_layer=encoding_layers["e2"])
d7 = decode_layer(d6, s2, self.generator_dim, layer=7, enc_layer=encoding_layers["e1"])
d8 = decode_layer(d7, s, self.output_filters, layer=8, enc_layer=None, do_concat=False)
output = tf.nn.tanh(d8) # scale to (-1, 1)
return output
def generator(self, images, embeddings, embedding_ids, inst_norm, is_training, reuse=False):
e8, enc_layers = self.encoder(images, is_training=is_training, reuse=reuse)
local_embeddings = tf.nn.embedding_lookup(embeddings, ids=embedding_ids)
local_embeddings = tf.reshape(local_embeddings, [self.batch_size, 1, 1, self.embedding_dim])
embedded = tf.concat([e8, local_embeddings], 3)
output = self.decoder(embedded, enc_layers, embedding_ids, inst_norm, is_training=is_training, reuse=reuse)
return output, e8
def discriminator(self, image, is_training, reuse=False):
with tf.variable_scope("discriminator"):
if reuse:
tf.get_variable_scope().reuse_variables()
h0 = lrelu(conv2d(image, self.discriminator_dim, scope="d_h0_conv"))
h1 = lrelu(batch_norm(conv2d(h0, self.discriminator_dim * 2, scope="d_h1_conv"),
is_training, scope="d_bn_1"))
h2 = lrelu(batch_norm(conv2d(h1, self.discriminator_dim * 4, scope="d_h2_conv"),
is_training, scope="d_bn_2"))
h3 = lrelu(batch_norm(conv2d(h2, self.discriminator_dim * 8, sh=1, sw=1, scope="d_h3_conv"),
is_training, scope="d_bn_3"))
# real or fake binary loss
fc1 = fc(tf.reshape(h3, [self.batch_size, -1]), 1, scope="d_fc1")
# category loss
fc2 = fc(tf.reshape(h3, [self.batch_size, -1]), self.embedding_num, scope="d_fc2")
return tf.nn.sigmoid(fc1), fc1, fc2
def build_model(self, is_training=True, inst_norm=False, no_target_source=False):
real_data = tf.placeholder(tf.float32,
[self.batch_size, self.input_width, self.input_width,
self.input_filters + self.output_filters],
name='real_A_and_B_images')
embedding_ids = tf.placeholder(tf.int64, shape=None, name="embedding_ids")
no_target_data = tf.placeholder(tf.float32,
[self.batch_size, self.input_width, self.input_width,
self.input_filters + self.output_filters],
name='no_target_A_and_B_images')
no_target_ids = tf.placeholder(tf.int64, shape=None, name="no_target_embedding_ids")
# target images
real_B = real_data[:, :, :, :self.input_filters]
# source images
real_A = real_data[:, :, :, self.input_filters:self.input_filters + self.output_filters]
embedding = init_embedding(self.embedding_num, self.embedding_dim)
fake_B, encoded_real_A = self.generator(real_A, embedding, embedding_ids, is_training=is_training,
inst_norm=inst_norm)
real_AB = tf.concat([real_A, real_B], 3)
fake_AB = tf.concat([real_A, fake_B], 3)
# Note it is not possible to set reuse flag back to False
# initialize all variables before setting reuse to True
real_D, real_D_logits, real_category_logits = self.discriminator(real_AB, is_training=is_training, reuse=False)
fake_D, fake_D_logits, fake_category_logits = self.discriminator(fake_AB, is_training=is_training, reuse=True)
# encoding constant loss
        # this loss assumes that the encodings of the generated image and the real image
        # should reside in the same space and stay close to each other
encoded_fake_B = self.encoder(fake_B, is_training, reuse=True)[0]
const_loss = (tf.reduce_mean(tf.square(encoded_real_A - encoded_fake_B))) * self.Lconst_penalty
# category loss
true_labels = tf.reshape(tf.one_hot(indices=embedding_ids, depth=self.embedding_num),
shape=[self.batch_size, self.embedding_num])
real_category_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=real_category_logits,
labels=true_labels))
fake_category_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=fake_category_logits,
labels=true_labels))
category_loss = self.Lcategory_penalty * (real_category_loss + fake_category_loss)
# binary real/fake loss
d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=real_D_logits,
labels=tf.ones_like(real_D)))
d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=fake_D_logits,
labels=tf.zeros_like(fake_D)))
# L1 loss between real and generated images
l1_loss = self.L1_penalty * tf.reduce_mean(tf.abs(fake_B - real_B))
# total variation loss
width = self.output_width
tv_loss = (tf.nn.l2_loss(fake_B[:, 1:, :, :] - fake_B[:, :width - 1, :, :]) / width
+ tf.nn.l2_loss(fake_B[:, :, 1:, :] - fake_B[:, :, :width - 1, :]) / width) * self.Ltv_penalty
        # maximize the chance that the generator fools the discriminator
cheat_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=fake_D_logits,
labels=tf.ones_like(fake_D)))
d_loss = d_loss_real + d_loss_fake + category_loss / 2.0
g_loss = cheat_loss + l1_loss + self.Lcategory_penalty * fake_category_loss + const_loss + tv_loss
if no_target_source:
            # no_target sources are examples without corresponding target images;
            # apart from the L1 loss, we can still compute the category, binary and constant losses on them.
            # This is useful when the discriminator gets saturated and d_loss drops to near zero:
            # such examples provide an additional source of losses to break the saturation
no_target_A = no_target_data[:, :, :, self.input_filters:self.input_filters + self.output_filters]
no_target_B, encoded_no_target_A = self.generator(no_target_A, embedding, no_target_ids,
is_training=is_training,
inst_norm=inst_norm, reuse=True)
no_target_labels = tf.reshape(tf.one_hot(indices=no_target_ids, depth=self.embedding_num),
shape=[self.batch_size, self.embedding_num])
no_target_AB = tf.concat([no_target_A, no_target_B], 3)
no_target_D, no_target_D_logits, no_target_category_logits = self.discriminator(no_target_AB,
is_training=is_training,
reuse=True)
encoded_no_target_B = self.encoder(no_target_B, is_training, reuse=True)[0]
no_target_const_loss = tf.reduce_mean(
tf.square(encoded_no_target_A - encoded_no_target_B)) * self.Lconst_penalty
no_target_category_loss = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(logits=no_target_category_logits,
labels=no_target_labels)) * self.Lcategory_penalty
d_loss_no_target = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=no_target_D_logits,
labels=tf.zeros_like(
no_target_D)))
cheat_loss += tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=no_target_D_logits,
labels=tf.ones_like(no_target_D)))
d_loss = d_loss_real + d_loss_fake + d_loss_no_target + (category_loss + no_target_category_loss) / 3.0
g_loss = cheat_loss / 2.0 + l1_loss + \
(self.Lcategory_penalty * fake_category_loss + no_target_category_loss) / 2.0 + \
(const_loss + no_target_const_loss) / 2.0 + tv_loss
d_loss_real_summary = tf.summary.scalar("d_loss_real", d_loss_real)
d_loss_fake_summary = tf.summary.scalar("d_loss_fake", d_loss_fake)
category_loss_summary = tf.summary.scalar("category_loss", category_loss)
cheat_loss_summary = tf.summary.scalar("cheat_loss", cheat_loss)
l1_loss_summary = tf.summary.scalar("l1_loss", l1_loss)
fake_category_loss_summary = tf.summary.scalar("fake_category_loss", fake_category_loss)
const_loss_summary = tf.summary.scalar("const_loss", const_loss)
d_loss_summary = tf.summary.scalar("d_loss", d_loss)
g_loss_summary = tf.summary.scalar("g_loss", g_loss)
tv_loss_summary = tf.summary.scalar("tv_loss", tv_loss)
d_merged_summary = tf.summary.merge([d_loss_real_summary, d_loss_fake_summary,
category_loss_summary, d_loss_summary])
g_merged_summary = tf.summary.merge([cheat_loss_summary, l1_loss_summary,
fake_category_loss_summary,
const_loss_summary,
g_loss_summary, tv_loss_summary])
# expose useful nodes in the graph as handles globally
input_handle = InputHandle(real_data=real_data,
embedding_ids=embedding_ids,
no_target_data=no_target_data,
no_target_ids=no_target_ids)
loss_handle = LossHandle(d_loss=d_loss,
g_loss=g_loss,
const_loss=const_loss,
l1_loss=l1_loss,
category_loss=category_loss,
cheat_loss=cheat_loss,
tv_loss=tv_loss)
eval_handle = EvalHandle(encoder=encoded_real_A,
generator=fake_B,
target=real_B,
source=real_A,
embedding=embedding)
summary_handle = SummaryHandle(d_merged=d_merged_summary,
g_merged=g_merged_summary)
# those operations will be shared, so we need
# to make them visible globally
setattr(self, "input_handle", input_handle)
setattr(self, "loss_handle", loss_handle)
setattr(self, "eval_handle", eval_handle)
setattr(self, "summary_handle", summary_handle)
def register_session(self, sess):
self.sess = sess
def retrieve_trainable_vars(self, freeze_encoder=False):
t_vars = tf.trainable_variables()
d_vars = [var for var in t_vars if 'd_' in var.name]
g_vars = [var for var in t_vars if 'g_' in var.name]
if freeze_encoder:
# exclude encoder weights
print("freeze encoder weights")
g_vars = [var for var in g_vars if not ("g_e" in var.name)]
return g_vars, d_vars
def retrieve_generator_vars(self):
all_vars = tf.global_variables()
generate_vars = [var for var in all_vars if 'embedding' in var.name or "g_" in var.name]
return generate_vars
def retrieve_handles(self):
input_handle = getattr(self, "input_handle")
loss_handle = getattr(self, "loss_handle")
eval_handle = getattr(self, "eval_handle")
summary_handle = getattr(self, "summary_handle")
return input_handle, loss_handle, eval_handle, summary_handle
def get_model_id_and_dir(self):
model_id = "experiment_%d_batch_%d" % (self.experiment_id, self.batch_size)
model_dir = os.path.join(self.checkpoint_dir, model_id)
return model_id, model_dir
def checkpoint(self, saver, step):
model_name = "unet.model"
model_id, model_dir = self.get_model_id_and_dir()
if not os.path.exists(model_dir):
os.makedirs(model_dir)
saver.save(self.sess, os.path.join(model_dir, model_name), global_step=step)
def restore_model(self, saver, model_dir):
ckpt = tf.train.get_checkpoint_state(model_dir)
if ckpt:
saver.restore(self.sess, ckpt.model_checkpoint_path)
print("restored model %s" % model_dir)
else:
print("fail to restore model %s" % model_dir)
def generate_fake_samples(self, input_images, embedding_ids):
input_handle, loss_handle, eval_handle, summary_handle = self.retrieve_handles()
fake_images, real_images, \
d_loss, g_loss, l1_loss = self.sess.run([eval_handle.generator,
eval_handle.target,
loss_handle.d_loss,
loss_handle.g_loss,
loss_handle.l1_loss],
feed_dict={
input_handle.real_data: input_images,
input_handle.embedding_ids: embedding_ids,
input_handle.no_target_data: input_images,
input_handle.no_target_ids: embedding_ids
})
return fake_images, real_images, d_loss, g_loss, l1_loss
def validate_model(self, val_iter, epoch, step):
labels, images = next(val_iter)
fake_imgs, real_imgs, d_loss, g_loss, l1_loss = self.generate_fake_samples(images, labels)
print("Sample: d_loss: %.5f, g_loss: %.5f, l1_loss: %.5f" % (d_loss, g_loss, l1_loss))
merged_fake_images = merge(scale_back(fake_imgs), [self.batch_size, 1])
merged_real_images = merge(scale_back(real_imgs), [self.batch_size, 1])
merged_pair = np.concatenate([merged_real_images, merged_fake_images], axis=1)
model_id, _ = self.get_model_id_and_dir()
model_sample_dir = os.path.join(self.sample_dir, model_id)
if not os.path.exists(model_sample_dir):
os.makedirs(model_sample_dir)
sample_img_path = os.path.join(model_sample_dir, "sample_%02d_%04d.png" % (epoch, step))
imageio.imsave(sample_img_path, merged_pair)
def export_generator(self, save_dir, model_dir, model_name="gen_model"):
saver = tf.train.Saver()
self.restore_model(saver, model_dir)
gen_saver = tf.train.Saver(var_list=self.retrieve_generator_vars())
gen_saver.save(self.sess, os.path.join(save_dir, model_name), global_step=0)
def infer(self, source_obj, embedding_ids, model_dir, save_dir):
source_provider = InjectDataProvider(source_obj)
if isinstance(embedding_ids, int) or len(embedding_ids) == 1:
embedding_id = embedding_ids if isinstance(embedding_ids, int) else embedding_ids[0]
source_iter = source_provider.get_single_embedding_iter(self.batch_size, embedding_id)
else:
source_iter = source_provider.get_random_embedding_iter(self.batch_size, embedding_ids)
tf.global_variables_initializer().run()
saver = tf.train.Saver(var_list=self.retrieve_generator_vars())
self.restore_model(saver, model_dir)
def save_imgs(imgs, count):
p = os.path.join(save_dir, "inferred_%04d.png" % count)
save_concat_images(imgs, img_path=p)
print("generated images saved at %s" % p)
count = 0
batch_buffer = list()
for labels, source_imgs in source_iter:
fake_imgs, real_imgs, d_loss, g_loss, l1_loss = self.generate_fake_samples(source_imgs, labels)
merged_fake_images = merge(scale_back(fake_imgs), [self.batch_size, 1])
batch_buffer.append(merged_fake_images)
if len(batch_buffer) == 10:
save_imgs(batch_buffer, count)
batch_buffer = list()
count += 1
if batch_buffer:
# last batch
save_imgs(batch_buffer, count)
def interpolate(self, source_obj, between, model_dir, save_dir, steps):
tf.global_variables_initializer().run()
saver = tf.train.Saver(var_list=self.retrieve_generator_vars())
self.restore_model(saver, model_dir)
# new interpolated dimension
new_x_dim = steps + 1
alphas = np.linspace(0.0, 1.0, new_x_dim)
def _interpolate_tensor(_tensor):
"""
Compute the interpolated tensor here
"""
x = _tensor[between[0]]
y = _tensor[between[1]]
interpolated = list()
for alpha in alphas:
interpolated.append(x * (1. - alpha) + alpha * y)
interpolated = np.asarray(interpolated, dtype=np.float32)
return interpolated
def filter_embedding_vars(var):
var_name = var.name
if var_name.find("embedding") != -1:
return True
if var_name.find("inst_norm/shift") != -1 or var_name.find("inst_norm/scale") != -1:
return True
return False
embedding_vars = filter(filter_embedding_vars, tf.trainable_variables())
        # here comes the hack: we overwrite the original tensors
        # with interpolated ones (note that the shapes may differ);
        # embedding_snapshot keeps the original values so that the
        # embedding can be restored at the end
embedding_snapshot = list()
for e_var in embedding_vars:
val = e_var.eval(session=self.sess)
embedding_snapshot.append((e_var, val))
t = _interpolate_tensor(val)
op = tf.assign(e_var, t, validate_shape=False)
print("overwrite %s tensor" % e_var.name, "old_shape ->", e_var.get_shape(), "new shape ->", t.shape)
self.sess.run(op)
source_provider = InjectDataProvider(source_obj)
input_handle, _, eval_handle, _ = self.retrieve_handles()
for step_idx in range(len(alphas)):
alpha = alphas[step_idx]
print("interpolate %d -> %.4f + %d -> %.4f" % (between[0], 1. - alpha, between[1], alpha))
source_iter = source_provider.get_single_embedding_iter(self.batch_size, 0)
batch_buffer = list()
count = 0
for _, source_imgs in source_iter:
count += 1
labels = [step_idx] * self.batch_size
generated, = self.sess.run([eval_handle.generator],
feed_dict={
input_handle.real_data: source_imgs,
input_handle.embedding_ids: labels
})
merged_fake_images = merge(scale_back(generated), [self.batch_size, 1])
batch_buffer.append(merged_fake_images)
if len(batch_buffer):
save_concat_images(batch_buffer,
os.path.join(save_dir, "frame_%02d_%02d_step_%02d.png" % (
between[0], between[1], step_idx)))
# restore the embedding variables
print("restore embedding values")
for var, val in embedding_snapshot:
op = tf.assign(var, val, validate_shape=False)
self.sess.run(op)
def train(self, lr=0.0002, epoch=100, schedule=10, resume=True, flip_labels=False,
freeze_encoder=False, fine_tune=None, sample_steps=50, checkpoint_steps=500):
g_vars, d_vars = self.retrieve_trainable_vars(freeze_encoder=freeze_encoder)
input_handle, loss_handle, _, summary_handle = self.retrieve_handles()
for var in tf.trainable_variables():
print(var)
if not self.sess:
raise Exception("no session registered")
learning_rate = tf.placeholder(tf.float32, name="learning_rate")
d_optimizer = tf.train.AdamOptimizer(learning_rate, beta1=0.5).minimize(loss_handle.d_loss, var_list=d_vars)
g_optimizer = tf.train.AdamOptimizer(learning_rate, beta1=0.5).minimize(loss_handle.g_loss, var_list=g_vars)
tf.global_variables_initializer().run()
real_data = input_handle.real_data
embedding_ids = input_handle.embedding_ids
no_target_data = input_handle.no_target_data
no_target_ids = input_handle.no_target_ids
# filter by one type of labels
data_provider = TrainDataProvider(self.data_dir, filter_by=fine_tune)
total_batches = data_provider.compute_total_batch_num(self.batch_size)
val_batch_iter = data_provider.get_val_iter(self.batch_size)
saver = tf.train.Saver(max_to_keep=3)
summary_writer = tf.summary.FileWriter(self.log_dir, self.sess.graph)
if resume:
_, model_dir = self.get_model_id_and_dir()
self.restore_model(saver, model_dir)
current_lr = lr
counter = 0
start_time = time.time()
for ei in range(epoch):
train_batch_iter = data_provider.get_train_iter(self.batch_size)
if (ei + 1) % schedule == 0:
update_lr = current_lr / 2.0
# minimum learning rate guarantee
update_lr = max(update_lr, 0.0002)
print("decay learning rate from %.5f to %.5f" % (current_lr, update_lr))
current_lr = update_lr
for bid, batch in enumerate(train_batch_iter):
counter += 1
labels, batch_images = batch
shuffled_ids = labels[:]
if flip_labels:
np.random.shuffle(shuffled_ids)
# Optimize D
_, batch_d_loss, d_summary = self.sess.run([d_optimizer, loss_handle.d_loss,
summary_handle.d_merged],
feed_dict={
real_data: batch_images,
embedding_ids: labels,
learning_rate: current_lr,
no_target_data: batch_images,
no_target_ids: shuffled_ids
})
# Optimize G
_, batch_g_loss = self.sess.run([g_optimizer, loss_handle.g_loss],
feed_dict={
real_data: batch_images,
embedding_ids: labels,
learning_rate: current_lr,
no_target_data: batch_images,
no_target_ids: shuffled_ids
})
                # optimize G a second time, following https://github.com/carpedm20/DCGAN-tensorflow,
                # and collect all the losses along the way
_, batch_g_loss, category_loss, cheat_loss, \
const_loss, l1_loss, tv_loss, g_summary = self.sess.run([g_optimizer,
loss_handle.g_loss,
loss_handle.category_loss,
loss_handle.cheat_loss,
loss_handle.const_loss,
loss_handle.l1_loss,
loss_handle.tv_loss,
summary_handle.g_merged],
feed_dict={
real_data: batch_images,
embedding_ids: labels,
learning_rate: current_lr,
no_target_data: batch_images,
no_target_ids: shuffled_ids
})
passed = time.time() - start_time
log_format = "Epoch: [%2d], [%4d/%4d] time: %4.4f, d_loss: %.5f, g_loss: %.5f, " + \
"category_loss: %.5f, cheat_loss: %.5f, const_loss: %.5f, l1_loss: %.5f, tv_loss: %.5f"
print(log_format % (ei, bid, total_batches, passed, batch_d_loss, batch_g_loss,
category_loss, cheat_loss, const_loss, l1_loss, tv_loss))
summary_writer.add_summary(d_summary, counter)
summary_writer.add_summary(g_summary, counter)
if counter % sample_steps == 0:
# sample the current model states with val data
self.validate_model(val_batch_iter, ei, counter)
if counter % checkpoint_steps == 0:
print("Checkpoint: save checkpoint step %d" % counter)
self.checkpoint(saver, counter)
# save the last checkpoint
print("Checkpoint: last checkpoint step %d" % counter)
self.checkpoint(saver, counter)
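# ----------------------------------------------------------------------------
# A minimal driver sketch for the model class defined above. The class name
# "UNet" and the constructor arguments are assumptions (the actual definition
# appears earlier in this file), so the outline is kept in comments:
#
#     model = UNet(experiment_dir="experiment", batch_size=16)  # hypothetical name/args
#     model.build_model(is_training=True, inst_norm=False)
#     with tf.Session() as sess:
#         model.register_session(sess)
#         model.train(lr=0.001, epoch=30, schedule=10,
#                     sample_steps=50, checkpoint_steps=500)
# ----------------------------------------------------------------------------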
| 54.78607 | 120 | 0.566079 |
1930d67ccfcb62ff59bd40238891829208e4dc61 | 4,816 | py | Python
be-image/server/classifier/vgg16bn.py | IBM/geospatial-event-observations | 6e9bd8759f8b66c841ce0afea1ddd173c668f293 | ["CC-BY-4.0", "BSD-3-Clause"] | 1 | 2022-02-24T06:53:56.000Z | 2022-02-24T06:53:56.000Z
be-image/server/classifier/vgg16bn.py | IBM/geospatial-event-observations | 6e9bd8759f8b66c841ce0afea1ddd173c668f293 | ["CC-BY-4.0", "BSD-3-Clause"] | 12 | 2021-03-02T01:45:03.000Z | 2022-03-08T23:32:46.000Z
be-image/server/classifier/vgg16bn.py | IBM/geospatial-event-observations | 6e9bd8759f8b66c841ce0afea1ddd173c668f293 | ["CC-BY-4.0", "BSD-3-Clause"] | null | null | null |
#
# Licensed Materials - Property of IBM
# 6949-04J
# © Copyright IBM Corp. 2020 All Rights Reserved
#
from __future__ import division, print_function
import os, json
from glob import glob
import numpy as np
from scipy import misc, ndimage
from scipy.ndimage.interpolation import zoom
from keras import backend as K
from keras.layers.normalization import BatchNormalization
from keras.utils.data_utils import get_file
from keras.models import Sequential
from keras.layers.core import Flatten, Dense, Dropout, Lambda
from keras.layers.convolutional import Conv2D, MaxPooling2D, ZeroPadding2D
from keras.layers.pooling import GlobalAveragePooling2D
from keras.optimizers import SGD, Adam
from keras.preprocessing import image
# The model expects Theano-style (channels-first) image ordering, so set it
# explicitly even when the TensorFlow backend is used. The older API for the
# same setting was:
#from keras import backend as K
#K.set_image_dim_ordering('th')
K.set_image_data_format('channels_first')
vgg_mean = np.array([123.68, 116.779, 103.939], dtype=np.float32).reshape((3,1,1))
def vgg_preprocess(x):
x = x - vgg_mean
return x[:, ::-1] # reverse axis rgb->bgr
class Vgg16BN():
"""The VGG 16 Imagenet model with Batch Normalization for the Dense Layers"""
def __init__(self, size=(224,224), include_top=True):
self.FILE_PATH = 'http://files.fast.ai/models/'
self.create(size, include_top)
self.get_classes()
def get_classes(self):
fname = 'imagenet_class_index.json'
fpath = get_file(fname, self.FILE_PATH+fname, cache_subdir='models')
with open(fpath) as f:
class_dict = json.load(f)
self.classes = [class_dict[str(i)][1] for i in range(len(class_dict))]
def predict(self, imgs, details=False):
all_preds = self.model.predict(imgs)
idxs = np.argmax(all_preds, axis=1)
preds = [all_preds[i, idxs[i]] for i in range(len(idxs))]
classes = [self.classes[idx] for idx in idxs]
return np.array(preds), idxs, classes
def ConvBlock(self, layers, filters):
model = self.model
for i in range(layers):
model.add(ZeroPadding2D((1, 1)))
model.add(Conv2D(filters, (3, 3), activation='relu'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
def FCBlock(self):
model = self.model
model.add(Dense(4096, activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(0.5))
def create(self, size, include_top):
if size != (224,224):
include_top=False
model = self.model = Sequential()
model.add(Lambda(vgg_preprocess, input_shape=(3,)+size, output_shape=(3,)+size))
self.ConvBlock(2, 64)
self.ConvBlock(2, 128)
self.ConvBlock(3, 256)
self.ConvBlock(3, 512)
self.ConvBlock(3, 512)
if not include_top:
fname = 'vgg16_bn_conv.h5'
model.load_weights(get_file(fname, self.FILE_PATH+fname, cache_subdir='models'))
return
model.add(Flatten())
self.FCBlock()
self.FCBlock()
model.add(Dense(1000, activation='softmax'))
fname = 'vgg16_bn.h5'
model.load_weights(get_file(fname, self.FILE_PATH+fname, cache_subdir='models'))
def get_batches(self, path, gen=image.ImageDataGenerator(), shuffle=True, batch_size=8, class_mode='categorical'):
return gen.flow_from_directory(path, target_size=(224,224),
class_mode=class_mode, shuffle=shuffle, batch_size=batch_size)
def ft(self, num):
model = self.model
model.pop()
for layer in model.layers: layer.trainable=False
model.add(Dense(num, activation='softmax'))
self.compile()
def finetune(self, batches):
self.ft(batches.nb_class)
classes = list(iter(batches.class_indices))
for c in batches.class_indices:
classes[batches.class_indices[c]] = c
self.classes = classes
def compile(self, lr=0.001):
self.model.compile(optimizer=Adam(lr=lr),
loss='categorical_crossentropy', metrics=['accuracy'])
def fit_data(self, trn, labels, val, val_labels, nb_epoch=1, batch_size=64):
self.model.fit(trn, labels, nb_epoch=nb_epoch,
validation_data=(val, val_labels), batch_size=batch_size)
def fit(self, batches, val_batches, nb_epoch=1):
self.model.fit_generator(batches, samples_per_epoch=batches.nb_sample, nb_epoch=nb_epoch,
validation_data=val_batches, nb_val_samples=val_batches.nb_sample)
def test(self, path, batch_size=8):
test_batches = self.get_batches(path, shuffle=False, batch_size=batch_size, class_mode=None)
return test_batches, self.model.predict_generator(test_batches, test_batches.nb_sample)
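# A minimal fine-tuning sketch, assuming an image folder with one sub-directory
# per class (the "data/train" and "data/valid" paths below are placeholders,
# not part of the original module):
if __name__ == '__main__':
    vgg = Vgg16BN(size=(224, 224), include_top=True)
    trn_batches = vgg.get_batches('data/train', batch_size=8)
    val_batches = vgg.get_batches('data/valid', batch_size=8, shuffle=False)
    vgg.finetune(trn_batches)   # replace the 1000-way ImageNet head with the dataset's classes
    vgg.fit(trn_batches, val_batches, nb_epoch=1)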
| 33.915493 | 118 | 0.671304 |
8359fe231b670fc2762da0a09f0e12f8ce4722a0 | 2,694 | py | Python
src/read/story/graph/Paragraph.py | cqpancoast/solaria | c2f60e1102e9ac1ea5350302653ce2b92e6e25c0 | ["MIT"] | null | null | null
src/read/story/graph/Paragraph.py | cqpancoast/solaria | c2f60e1102e9ac1ea5350302653ce2b92e6e25c0 | ["MIT"] | null | null | null
src/read/story/graph/Paragraph.py | cqpancoast/solaria | c2f60e1102e9ac1ea5350302653ce2b92e6e25c0 | ["MIT"] | null | null | null |
class Paragraph:
"""A Paragraph is a "position" in a GraphStory that a Reader can be
in.
Paragraphs display prompts upon reader entry depending upon the
Reader state. A Paragraph has zero or more directed edges, called
Phrases, that direct to a destination Paragraph. This new Paragraph
can be the origin Paragraph. A Paragraph also has zero or more
Phrases that feed into it.
The Paragraph has a field called conditional_prompts which is a list
of maps from determiners to prompts. prompt_conditionally returns
the corresponding prompt, or a default prompt.
The inhabiting of a Paragraph never deviates from this prescription:
- The Paragraph accepts the Reader, and prints out a prompt
depending on the Reader's state.
- The reader (lowercase R) produces an input.
- The Paragraph sends that input off to the interpreter, along
with some information about itself. TODO clarify this and below
- Based on the response from the interpreter, the Paragraph either
prints out some kind of error-style message or sends a user down
a Phrase.
A Paragraph also has the additional layer property. Layers are a
field in a Paragraph that allows a Reader to inhabit multiple
Paragraphs at once. This is accomplished by two or more Phrases
pointing away from the same Paragraph having the same name, all
pointing to Paragraphs from different layers. It is illegal to have
two Phrases of the same name and source paragraph directed towards
Paragraphs on the same layer.
"""
    def __init__(self, conditional_prompts=None):
        """Initializes this Paragraph with a list of conditional prompts,
        defaulting to an empty list when none is given."""
        if conditional_prompts is None:
            conditional_prompts = []
        assert isinstance(conditional_prompts, list) # NOTE is this check enough?
        self.conditional_prompts = conditional_prompts
    # NOTE see above docs
    def accept_reader(self, reader):
        assert isinstance(reader, dict)
        self.prompt_conditionally(reader)
def prompt_conditionally(self, reader):
"""Returns the correct prompt for this paragraph depending on the
Reader's stats
"""
for condition in self.conditional_prompts:
pass # print conditional_prompt if reader passes condition
# TODO require condition has default prompt, or code that in?
return ""
# TODO function for determining if Reader satisfies condition
# TODO maybe add some stuff with IDs and whatever
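# A minimal sketch of how a Paragraph might be driven, assuming a conditional
# prompt is a map from a determiner to a prompt string (that shape is an
# assumption; the class above deliberately leaves it open):
if __name__ == "__main__":
    prompts = [{"has_lantern": "The cave mouth glows faintly ahead."}]
    paragraph = Paragraph(prompts)
    paragraph.accept_reader({"has_lantern": True})  # prompt selection itself is still a TODO above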
| 42.09375 | 78 | 0.711952 |
0272df4a82f4f5a68d45743ddad7e1bf3055746e | 227 | py | Python
PR_BCI_team/Team_StarLab/DKHan/examples/giga_cnn/csv_plot.py | PatternRecognition/OpenBMI | d9291ddb81f4319fb3764d7192e0363939a62ee9 | ["MIT"] | 217 | 2015-11-02T11:10:29.000Z | 2022-03-22T07:01:12.000Z
PR_BCI_team/Team_StarLab/DKHan/examples/giga_cnn/csv_plot.py | deep-bci-g/OpenBMI | 75daf901b2dbe215852cbff243606dcfcd10f05c | ["MIT"] | 24 | 2015-11-02T11:10:45.000Z | 2021-09-08T11:10:33.000Z
PR_BCI_team/Team_StarLab/DKHan/examples/giga_cnn/csv_plot.py | deep-bci-g/OpenBMI | 75daf901b2dbe215852cbff243606dcfcd10f05c | ["MIT"] | 112 | 2016-01-22T01:45:44.000Z | 2022-03-22T07:08:19.000Z |
import pandas as pd
df = pd.read_csv('C:\\Users\\dk\\Downloads\\run-Jun05_11-37-22_DESKTOP-NMLMMUUfold_3_g_0.7-tag-Eval_Acc.csv')
print(df)
from matplotlib import pyplot as plt
plt.bar(df.Step, df['Value'])
plt.show()
| 15.133333 | 109 | 0.722467 |
1fc586e126625de493530bd706bcb995aab4194a | 725 | py | Python
venv/lib/python3.9/site-packages/google/cloud/monitoring_v3/types/query_service.py | qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | 630dcef73e6a258b6e9a52f934e2dd912ce741f8 | ["Apache-2.0"] | 18 | 2020-09-19T17:52:47.000Z | 2022-03-25T12:09:22.000Z
venv/lib/python3.9/site-packages/google/cloud/monitoring_v3/types/query_service.py | qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | 630dcef73e6a258b6e9a52f934e2dd912ce741f8 | ["Apache-2.0"] | 110 | 2020-02-05T15:26:47.000Z | 2022-03-28T23:02:02.000Z
venv/lib/python3.9/site-packages/google/cloud/monitoring_v3/types/query_service.py | qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | 630dcef73e6a258b6e9a52f934e2dd912ce741f8 | ["Apache-2.0"] | 26 | 2020-02-08T00:05:46.000Z | 2022-03-27T19:32:26.000Z |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto
__protobuf__ = proto.module(package="google.monitoring.v3", manifest={},)
__all__ = tuple(sorted(__protobuf__.manifest))
| 32.954545 | 74 | 0.747586 |
a81b7dfff3e1cbf0ca841ddaa31eb4169c38737a | 6,621 | py | Python
Lib/fontbakery/reporters/ghmarkdown.py | RosaWagner/fontbakery | e5b57a4938f0f25cb722cc18cacb4836657b1ff9 | ["Apache-2.0"] | 351 | 2015-01-12T09:27:03.000Z | 2022-03-24T14:37:56.000Z
Lib/fontbakery/reporters/ghmarkdown.py | RosaWagner/fontbakery | e5b57a4938f0f25cb722cc18cacb4836657b1ff9 | ["Apache-2.0"] | 2,308 | 2015-01-07T10:49:14.000Z | 2022-03-31T22:55:21.000Z
Lib/fontbakery/reporters/ghmarkdown.py | RosaWagner/fontbakery | e5b57a4938f0f25cb722cc18cacb4836657b1ff9 | ["Apache-2.0"] | 89 | 2015-03-02T17:31:04.000Z | 2022-03-16T13:18:59.000Z |
import os
from fontbakery.reporters.serialize import SerializeReporter
from fontbakery.checkrunner import Status
from fontbakery import __version__ as version
LOGLEVELS=["ERROR","FAIL","WARN","SKIP","INFO","PASS","DEBUG"]
class GHMarkdownReporter(SerializeReporter):
def __init__(self, loglevels, **kwd):
super().__init__(**kwd)
self.loglevels = loglevels
def write(self):
with open(self.output_file, "w") as fh:
fh.write(self.get_markdown())
print(f'A report in GitHub Markdown format which can be useful\n'
f' for posting issues on a GitHub issue tracker has been\n'
f' saved to "{self.output_file}"')
def emoticon(self, name):
return {
'ERROR': "\U0001F494", # 💔 :broken_heart:
'FAIL': "\U0001F525", # 🔥 :fire:
'WARN': "\U000026A0", # ⚠️ :warning:
'INFO': "\U00002139", # ℹ️ :information_source:
'SKIP': "\U0001F4A4", # 💤 :zzz:
            'PASS': "\U0001F35E", # 🍞 :bread:
'DEBUG': "\U0001F50E", # 🔎 :mag_right:
}[name]
def html5_collapsible(self, summary, details):
return (f"<details>\n"
f"<summary>{summary}</summary>\n"
f"{details}\n"
f"</details>\n")
def log_md(self, log):
if not self.omit_loglevel(log["status"]):
return "* {} **{}** {}\n".format(self.emoticon(log["status"]),
log["status"],
log["message"])
else:
return ""
def render_rationale(self, check, checkid):
if not "rationale" in check:
return ""
# Ideally we'll at some point invoke a proper markdown
# parser here. But for now, let's simply fill the raw
# content into an 80-column block of text and output it
# enclosed in <pre></pre> tags...
import html
from fontbakery.utils import text_flow, unindent_rationale
content = unindent_rationale(check['rationale'], checkid)
rationale = html.escape(text_flow(content, 80))
return f"<pre>--- Rationale ---\n{rationale}</pre>\n"
def check_md(self, check):
checkid = check["key"][1].split(":")[1].split(">")[0]
profile = check["profile"]
check["logs"].sort(key=lambda c: c["status"])
logs = "".join(map(self.log_md, check["logs"]))
github_search_url = (f"[{checkid}]"
f"(https://font-bakery.readthedocs.io/en/latest"
f"/fontbakery/profiles/{profile}.html#{checkid})")
rationale = self.render_rationale(check, checkid)
return self.html5_collapsible("{} <b>{}:</b> {}".format(self.emoticon(check["result"]),
check["result"],
check["description"]),
f"\n* {github_search_url}\n{rationale}\n{logs}")
def omit_loglevel(self, msg):
return self.loglevels and (self.loglevels[0] > Status(msg))
def deduce_profile_from_section_name(self, section):
# This is very hacky!
# We should have a much better way of doing it...
if 'Google Fonts' in section: return 'googlefonts'
if 'Adobe' in section: return 'adobefonts'
if 'Type Network' in section: return 'typenetwork'
if 'Universal' in section: return 'universal'
if 'Basic UFO checks' in section: return 'ufo_sources'
if 'Checks inherited from Microsoft Font Validator' in section: return 'fontval'
if 'fontbakery.profiles.' in section: return section.split('fontbakery.profiles.')[1].split('>')[0]
return section
def get_markdown(self):
checks = {}
family_checks = []
data = self.getdoc()
num_checks = 0
for section in data["sections"]:
for cluster in section["checks"]:
if not isinstance(cluster, list):
cluster = [cluster]
num_checks += len(cluster)
for check in cluster:
if self.omit_loglevel(check["result"]):
continue
check['profile'] = self.deduce_profile_from_section_name(section["key"][0])
if "filename" not in check.keys():
# That's a family check!
family_checks.append(check)
else:
key = os.path.basename(check["filename"])
if key not in checks:
checks[key] = []
checks[key].append(check)
md = (f"## Fontbakery report\n"
f"\n"
f"Fontbakery version: {version}\n"
f"\n")
if family_checks:
family_checks.sort(key=lambda c: c["result"])
md += self.html5_collapsible("<b>[{}] Family checks</b>".format(len(family_checks)),
"".join(map(self.check_md, family_checks)) + "<br>")
for filename in checks.keys():
checks[filename].sort(key=lambda c: LOGLEVELS.index(c["result"]))
md += self.html5_collapsible("<b>[{}] {}</b>".format(len(checks[filename]),
filename),
"".join(map(self.check_md, checks[filename])) + "<br>")
if num_checks != 0:
summary_table = "### Summary\n\n" + \
("| {} " + " | {} ".join(LOGLEVELS) + " |\n").format(*[self.emoticon(k)
for k in LOGLEVELS]) + \
("|:-----:|:----:|:----:|:----:|:----:|:----:|:----:|\n"
"| {} | {} | {} | {} | {} | {} | {} |\n"
"").format(*[data["result"][k]
for k in LOGLEVELS]) + \
("| {:.0f}% | {:.0f}% | {:.0f}% | {:.0f}% | {:.0f}% | {:.0f}% | {:.0f}% |\n"
"").format(*[100*data["result"][k]/num_checks
for k in LOGLEVELS])
md += "\n" + summary_table
omitted = [l for l in LOGLEVELS if self.omit_loglevel(l)]
if omitted:
md += "\n" + \
"**Note:** The following loglevels were omitted in this report:\n" + \
"".join(map("* **{}**\n".format, omitted))
return md
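# A minimal usage sketch. In practice the fontbakery CLI constructs this
# reporter; the keyword arguments below (runner, output_file) are assumptions
# inferred from how write() and getdoc() are used above, so the outline stays
# in comments:
#
#     reporter = GHMarkdownReporter(loglevels=loglevels,      # e.g. a list with a single Status threshold
#                                   runner=runner,            # a configured check runner
#                                   output_file="fontbakery-report.md")
#     # ... after the checks have run ...
#     reporter.write()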
| 41.641509 | 107 | 0.488748 |
3a18658218131d86ba2e6de06fd67753fde4176c | 1,660 | py | Python
lte/gateway/python/magma/subscriberdb/crypto/gsm.py | remo5000/magma | 1d1dd9a23800a8e07b1ce016776d93e12430ec15 | ["BSD-3-Clause"] | 84 | 2016-11-03T20:51:09.000Z | 2018-09-13T04:36:18.000Z
lte/gateway/python/magma/subscriberdb/crypto/gsm.py | remo5000/magma | 1d1dd9a23800a8e07b1ce016776d93e12430ec15 | ["BSD-3-Clause"] | 79 | 2016-11-10T06:30:58.000Z | 2018-06-01T14:29:39.000Z
lte/gateway/python/magma/subscriberdb/crypto/gsm.py | 119Vik/magma-1 | 107a7b374466a837fc0a49b283ba9d6ff1d702e3 | ["BSD-3-Clause"] | 37 | 2016-11-03T22:53:22.000Z | 2018-09-07T15:32:16.000Z |
"""
Copyright (c) 2016-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
import abc
from .utils import CryptoError
class GSMA3A8Algo(metaclass=abc.ABCMeta):
"""
Abstract class for the GSM A3/A8 algorithms. The A3/A8 algos take
the key and random variable as input, and produce an auth tuple as output.
"""
@abc.abstractmethod
def generate_auth_tuple(self, key):
"""
Args:
            key - secret key for a subscriber
Returns:
(rand, sres, cipher_key) auth tuple
Raises:
CryptoError on any error
"""
raise NotImplementedError()
class UnsafePreComputedA3A8(GSMA3A8Algo):
"""
Sample implementation of the A3/A8 algo. This algo expects the auth
tuple to be stored directly as the key for the subscriber.
Essentially this algo doesn't do any random number generation or crypto
operation, but provides a dummy implementation of the A3/A8 interfaces.
"""
def generate_auth_tuple(self, key):
"""
Args:
key - 28 byte long auth tuple
Returns:
(rand, sres, cipher_key) tuple
Raises:
CryptoError if the key is not 28 byte long
"""
if len(key) != 28:
raise CryptoError('Invalid auth vector: %s' % key)
rand = key[:16]
sres = key[16:20]
cipher_key = key[20:]
return (rand, sres, cipher_key)
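# A minimal sketch of the slicing behaviour: the "unsafe" algorithm simply cuts
# a pre-computed 28-byte vector into (rand, sres, cipher_key) = (16, 4, 8) bytes.
if __name__ == "__main__":
    algo = UnsafePreComputedA3A8()
    dummy_vector = bytes(range(28))  # stand-in for a stored per-subscriber auth vector
    rand, sres, cipher_key = algo.generate_auth_tuple(dummy_vector)
    assert (len(rand), len(sres), len(cipher_key)) == (16, 4, 8)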
| 28.62069 | 78 | 0.637952 |
2a537925387d65a9c3ff79b709714161cc05a378 | 9,919 | py | Python
imcsdk/mometa/comm/CommSnmp.py | TetrationAnalytics/imcsdk | d86e47831f294dc9fa5e99b9a92abceac2502d76 | ["Apache-2.0"] | null | null | null
imcsdk/mometa/comm/CommSnmp.py | TetrationAnalytics/imcsdk | d86e47831f294dc9fa5e99b9a92abceac2502d76 | ["Apache-2.0"] | null | null | null
imcsdk/mometa/comm/CommSnmp.py | TetrationAnalytics/imcsdk | d86e47831f294dc9fa5e99b9a92abceac2502d76 | ["Apache-2.0"] | 2 | 2016-05-26T02:05:46.000Z | 2017-09-13T05:13:25.000Z |
"""This module contains the general information for CommSnmp ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class CommSnmpConsts:
ADMIN_STATE_DISABLED = "disabled"
ADMIN_STATE_ENABLED = "enabled"
COM2_SEC_NONE = "None"
COM2_SEC_DISABLED = "disabled"
COM2_SEC_FULL = "full"
COM2_SEC_LIMITED = "limited"
CONFIG_CHANGE_COMMIT = "commit"
CONFIG_CHANGE_NO_COMMIT = "no-commit"
PROTO_ALL = "all"
PROTO_NONE = "none"
PROTO_TCP = "tcp"
PROTO_UDP = "udp"
class CommSnmp(ManagedObject):
"""This is CommSnmp class."""
consts = CommSnmpConsts()
naming_props = set([])
mo_meta = {
"classic": MoMeta("CommSnmp", "commSnmp", "snmp-svc", VersionMeta.Version151f, "InputOutput", 0x1fff, [], ["admin", "read-only", "user"], ['commSvcEp'], ['commSnmpConfigCommit', 'commSnmpTrap', 'commSnmpUser'], ["Get", "Set"]),
"modular": MoMeta("CommSnmp", "commSnmp", "snmp-svc", VersionMeta.Version2013e, "InputOutput", 0x1fff, [], ["admin", "read-only", "user"], ['commSvcEp'], ['commSnmpConfigCommit', 'commSnmpTrap', 'commSnmpUser'], ["Get", "Set"])
}
prop_meta = {
"classic": {
"admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
"com2_sec": MoPropertyMeta("com2_sec", "com2Sec", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x4, None, None, None, ["None", "disabled", "full", "limited"], []),
"community": MoPropertyMeta("community", "community", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""[!#$%\(\)\*\+,\-\./:<=\[\]\^_\{\}~a-zA-Z0-9]{0,18}""", [], []),
"config_change": MoPropertyMeta("config_change", "configChange", "string", VersionMeta.Version401a, MoPropertyMeta.READ_WRITE, 0x10, 0, 510, None, ["commit", "no-commit"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x20, 0, 255, None, [], []),
"engine_id_key": MoPropertyMeta("engine_id_key", "engineIdKey", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x40, 0, 27, r"""[^#!&]{0,27}""", [], []),
"port": MoPropertyMeta("port", "port", "uint", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, [], ["1-65535"]),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x100, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x200, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"sys_contact": MoPropertyMeta("sys_contact", "sysContact", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x400, 0, 64, None, [], []),
"sys_location": MoPropertyMeta("sys_location", "sysLocation", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x800, 0, 64, None, [], []),
"trap_community": MoPropertyMeta("trap_community", "trapCommunity", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x1000, None, None, r"""[!#$%\(\)\*\+,\-\./:<=\[\]\^_\{\}~a-zA-Z0-9]{0,18}""", [], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
"engine_id": MoPropertyMeta("engine_id", "engineId", "string", VersionMeta.Version209c, MoPropertyMeta.READ_ONLY, None, 0, 255, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
"proto": MoPropertyMeta("proto", "proto", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, None, ["all", "none", "tcp", "udp"], []),
"snmp_config_in_progress": MoPropertyMeta("snmp_config_in_progress", "snmpConfigInProgress", "string", VersionMeta.Version401a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
},
"modular": {
"admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["Disabled", "Enabled", "disabled", "enabled"], []),
"com2_sec": MoPropertyMeta("com2_sec", "com2Sec", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, None, None, None, ["None", "disabled", "full", "limited"], []),
"community": MoPropertyMeta("community", "community", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""[!#$%\(\)\*\+,\-\./:<=\[\]\^_\{\}~a-zA-Z0-9]{0,18}""", [], []),
"config_change": MoPropertyMeta("config_change", "configChange", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x10, 0, 510, None, ["commit", "no-commit"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x20, 0, 255, None, [], []),
"engine_id_key": MoPropertyMeta("engine_id_key", "engineIdKey", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x40, 0, 27, r"""[^#!&]{0,27}""", [], []),
"port": MoPropertyMeta("port", "port", "uint", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, [], ["1-65535"]),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x100, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x200, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"sys_contact": MoPropertyMeta("sys_contact", "sysContact", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x400, 0, 64, None, [], []),
"sys_location": MoPropertyMeta("sys_location", "sysLocation", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x800, 0, 64, None, [], []),
"trap_community": MoPropertyMeta("trap_community", "trapCommunity", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x1000, None, None, r"""[!#$%\(\)\*\+,\-\./:<=\[\]\^_\{\}~a-zA-Z0-9]{0,18}""", [], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
"engine_id": MoPropertyMeta("engine_id", "engineId", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 255, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
"proto": MoPropertyMeta("proto", "proto", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["all", "none", "tcp", "udp"], []),
"snmp_config_in_progress": MoPropertyMeta("snmp_config_in_progress", "snmpConfigInProgress", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
},
}
prop_map = {
"classic": {
"adminState": "admin_state",
"com2Sec": "com2_sec",
"community": "community",
"configChange": "config_change",
"dn": "dn",
"engineIdKey": "engine_id_key",
"port": "port",
"rn": "rn",
"status": "status",
"sysContact": "sys_contact",
"sysLocation": "sys_location",
"trapCommunity": "trap_community",
"childAction": "child_action",
"descr": "descr",
"engineId": "engine_id",
"name": "name",
"proto": "proto",
"snmpConfigInProgress": "snmp_config_in_progress",
},
"modular": {
"adminState": "admin_state",
"com2Sec": "com2_sec",
"community": "community",
"configChange": "config_change",
"dn": "dn",
"engineIdKey": "engine_id_key",
"port": "port",
"rn": "rn",
"status": "status",
"sysContact": "sys_contact",
"sysLocation": "sys_location",
"trapCommunity": "trap_community",
"childAction": "child_action",
"descr": "descr",
"engineId": "engine_id",
"name": "name",
"proto": "proto",
"snmpConfigInProgress": "snmp_config_in_progress",
},
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.admin_state = None
self.com2_sec = None
self.community = None
self.config_change = None
self.engine_id_key = None
self.port = None
self.status = None
self.sys_contact = None
self.sys_location = None
self.trap_community = None
self.child_action = None
self.descr = None
self.engine_id = None
self.name = None
self.proto = None
self.snmp_config_in_progress = None
ManagedObject.__init__(self, "CommSnmp", parent_mo_or_dn, **kwargs)
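# A minimal construction sketch (the parent DN "sys/svc-ext" is an assumption,
# and committing the change would additionally require a logged-in ImcHandle,
# which is not shown here):
if __name__ == "__main__":
    mo = CommSnmp(parent_mo_or_dn="sys/svc-ext")
    mo.admin_state = "enabled"
    mo.port = "161"
    mo.sys_contact = "noc@example.com"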
| 67.02027 | 235 | 0.605202 |
8654ea3c4e3ead13e4a753c03a0e4fca3f698c75 | 8,736 | py | Python
libweasyl/libweasyl/test/test_text.py | kfkitsune/weasyl | 7e63c6db98ed2debfadbc277509533f72ea078a5 | ["Apache-2.0"] | 111 | 2016-05-18T04:18:18.000Z | 2021-11-03T02:05:19.000Z
libweasyl/libweasyl/test/test_text.py | Weasyl/weasyl | 80c86942c6f20a815086e2895fdad51d3aa77eed | ["Apache-2.0"] | 1,103 | 2016-05-29T05:17:53.000Z | 2022-03-31T18:12:40.000Z
libweasyl/libweasyl/test/test_text.py | kfkitsune/weasyl | 7e63c6db98ed2debfadbc277509533f72ea078a5 | ["Apache-2.0"] | 47 | 2016-05-29T20:48:37.000Z | 2021-11-12T09:40:40.000Z |
# encoding: utf-8
from lxml.etree import LIBXML_VERSION
import pytest
from libweasyl.text import markdown, markdown_excerpt, markdown_link
libxml_xfail = pytest.mark.xfail(LIBXML_VERSION < (2, 9), reason='libxml2 too old to preserve whitespace')
user_linking_markdown_tests = [
('<~spam>', '<a href="/~spam">spam</a>'),
('<!spam>', '<a href="/~spam" class="user-icon"><img src="/~spam/avatar" alt="spam"></a>'),
    ('![](user:spam)![](user:spam)',
'<a href="/~spam" class="user-icon"><img src="/~spam/avatar" alt="spam"></a>'
'<a href="/~spam" class="user-icon"><img src="/~spam/avatar" alt="spam"></a>'),
('<!~spam>', '<a href="/~spam" class="user-icon"><img src="/~spam/avatar" alt="spam"> <span>spam</span></a>'),
    ('![user image with alt text](user:example)', '<a href="/~example" class="user-icon"><img src="/~example/avatar"> <span>user image with alt text</span></a>'),
('<user:spam>', '<a href="/~spam">spam</a>'),
('[link](user:spam)', '<a href="/~spam">link</a>'),
('<fa:spam>', '<a href="https://www.furaffinity.net/user/spam" rel="nofollow ugc">spam</a>'),
('<da:spam>', '<a href="https://www.deviantart.com/spam" rel="nofollow ugc">spam</a>'),
('<ib:spam>', '<a href="https://inkbunny.net/spam" rel="nofollow ugc">spam</a>'),
('<sf:spam>', '<a href="https://spam.sofurry.com/" rel="nofollow ugc">spam</a>'),
]
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_basic_user_linking(target, expected):
assert markdown(target) == '<p>%s</p>\n' % (expected,)
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_user_linking_in_tag(target, expected):
assert markdown('<em>%s</em>' % (target,)) == '<p><em>%s</em></p>\n' % (expected,)
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_user_linking_in_tail(target, expected):
assert markdown('<em>eggs</em>%s' % (target,)) == '<p><em>eggs</em>%s</p>\n' % (expected,)
@libxml_xfail
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_user_linking_twice_in_tag(target, expected):
assert markdown('<em>%s %s</em>' % (target, target)) == '<p><em>%s %s</em></p>\n' % (expected, expected)
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_user_linking_twice_in_tag_with_more_text_between(target, expected):
assert markdown('<em>%s spam %s</em>' % (target, target)) == '<p><em>%s spam %s</em></p>\n' % (expected, expected)
@libxml_xfail
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_user_linking_twice_in_tail(target, expected):
assert markdown('<em>eggs</em>%s %s' % (target, target)) == (
'<p><em>eggs</em>%s %s</p>\n' % (expected, expected))
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_user_linking_twice_in_tail_with_more_text_betweeen(target, expected):
assert markdown('<em>eggs</em>%s spam %s' % (target, target)) == (
'<p><em>eggs</em>%s spam %s</p>\n' % (expected, expected))
@pytest.mark.parametrize(('target', 'expected'), user_linking_markdown_tests)
def test_markdown_user_linking_in_markdown(target, expected):
assert markdown('*%s*' % (target,)) == '<p><em>%s</em></p>\n' % (expected,)
def test_markdown_no_user_links_in_code():
assert markdown('<code><~spam></code>') == '<p><code><~spam></code></p>\n'
def test_markdown_no_user_links_in_pre():
assert markdown('<pre><~spam></pre>') == '<pre><p><~spam></p></pre>\n'
def test_markdown_no_user_links_in_links():
assert markdown('<a><~spam></a>') == '<p><a><~spam></a></p>\n'
def test_markdown_escaped_user_link():
assert markdown('\\\\<~spam>') == '<p><~spam></p>\n'
def test_markdown_multi_element():
assert markdown('one\n\ntwo') == '<p>one</p>\n\n<p>two</p>\n'
def test_markdown_user_linking_with_underscore():
assert markdown('<~hello_world>') == '<p><a href="/~helloworld">hello_world</a></p>\n'
def test_markdown_image_replacement():
    assert markdown('![example](http://example)') == '<p><a href="http://example" rel="nofollow ugc">example</a></p>\n'
assert markdown('<img alt="broken">') == '<p><a href="">broken</a></p>\n'
def test_forum_whitelist():
assert markdown('https://forums.weasyl.com/foo') == (
'<p><a href="https://forums.weasyl.com/foo">https://forums.weasyl.com/foo</a></p>\n')
def test_markdown_no_autolink_in_html_link():
assert markdown('[https://foo.test/](https://bar.test/)') == '<p><a href="https://bar.test/" rel="nofollow ugc">https://foo.test/</a></p>\n'
assert markdown('[@foo@bar.test](https://baz.test/)') == '<p><a href="https://baz.test/" rel="nofollow ugc">@foo@bar.test</a></p>\n'
assert markdown('<a href="https://bar.test/">https://foo.test/</a>') == '<p><a href="https://bar.test/" rel="nofollow ugc">https://foo.test/</a></p>\n'
assert markdown('<A href="https://baz.test/">@foo@bar.test</A>') == '<p><a href="https://baz.test/" rel="nofollow ugc">@foo@bar.test</a></p>\n'
assert markdown('<a href="https://baz.test/">@foo@bar.test</a>') == '<p><a href="https://baz.test/" rel="nofollow ugc">@foo@bar.test</a></p>\n'
assert markdown('<b>https://foo.test/</b>') == '<p><b><a href="https://foo.test/" rel="nofollow ugc">https://foo.test/</a></b></p>\n'
assert markdown('<b>@foo@bar.test</b>') == '<p><b>@<a href="mailto:foo@bar.test">foo@bar.test</a></b></p>\n'
def test_markdown_unordered_list():
assert markdown('- five\n- six\n- seven') == '<ul><li>five</li>\n<li>six</li>\n<li>seven</li>\n</ul>'
def test_markdown_regular_ordered_list_start():
assert markdown('1. five\n1. six\n1. seven') == '<ol start="1"><li>five</li>\n<li>six</li>\n<li>seven</li>\n</ol>'
def test_markdown_respect_ordered_list_start():
assert markdown('5. five\n6. six\n7. seven') == '<ol start="5"><li>five</li>\n<li>six</li>\n<li>seven</li>\n</ol>'
def test_markdown_strikethrough():
assert markdown(u"~~test~~") == u"<p><del>test</del></p>\n"
@pytest.mark.parametrize(('target', 'expected'), [
(u"[external](http://example.com/)", u'<a href="http://example.com/" rel="nofollow ugc">external</a>'),
(u'<a href="http://example.com/">external</a>', u'<a href="http://example.com/" rel="nofollow ugc">external</a>'),
(u'<a href="http://example.com/" rel="noreferrer">external</a>', u'<a href="http://example.com/" rel="nofollow ugc">external</a>'),
(u"[external](//example.com/)", u'<a href="//example.com/" rel="nofollow ugc">external</a>'),
])
def test_markdown_external_link_noreferrer(target, expected):
assert markdown(target) == u"<p>%s</p>\n" % (expected,)
markdown_link_tests = [
(('spam', '/eggs'), '[spam](/eggs)'),
((']spam[', '/eggs'), r'[\]spam\[](/eggs)'),
(('[[spam]', '/eggs'), r'[\[\[spam\]](/eggs)'),
]
@pytest.mark.parametrize(('target', 'expected'), markdown_link_tests)
def test_markdown_link(target, expected):
assert markdown_link(*target) == expected
def test_tag_stripping():
assert markdown(u"<button>text</button>") == u"<p>text</p>\n"
assert markdown(u"<button><button>text</button></button>") == u"<p>text</p>\n"
assert markdown(u"<!--[if IE]><script>alert(1)</script><![endif]-->") == u"\n"
markdown_excerpt_tests = [
(u'', u''),
(u'short', u'short'),
(u'just short enoughAAAAAAAAAAAAA', u'just short enoughAAAAAAAAAAAAA'),
(u'not short enoughAAAAAAAAAAAAAAA', u'not short enoughAAAAAAAAAAAAA…'),
(u'*leading* inline formatting', u'leading inline formatting'),
(u'middle *inline* formatting', u'middle inline formatting'),
(u'trailing inline *formatting*', u'trailing inline formatting'),
(u'*nested **inline** formatting*', u'nested inline formatting'),
(u' unnecessary whitespace\t', u'unnecessary whitespace'),
(u'multiple\nlines', u'multiple lines'),
(u'multiple \nlines', u'multiple lines'),
(u'multiple\n\nparagraphs', u'multiple paragraphs'),
(u'Üñíçôđe\N{COMBINING ACUTE ACCENT}', u'Üñíçôđe\N{COMBINING ACUTE ACCENT}'),
(u'single-codepoint graphemes😊😊😊😊', u'single-codepoint graphemes😊😊😊😊'),
(u'single-codepoint graphemes😊😊😊😊😊', u'single-codepoint graphemes😊😊😊…'),
(u'test\n - lists\n - of\n - items\n\ntest', u'test lists of items test'),
]
@pytest.mark.parametrize(('target', 'expected'), markdown_excerpt_tests)
def test_excerpt(target, expected):
assert markdown_excerpt(target, length=30) == expected
def test_excerpt_default_length():
assert markdown_excerpt(u'a' * 300) == u'a' * 300
assert markdown_excerpt(u'a' * 301) == u'a' * 299 + u'…'
| 46.716578 | 162 | 0.643658 |
acdb91b87c82150b117eee8f27304c79d86e78fa | 83,744 | py | Python
xrt/backends/raycing/materials.py | adinatan/xrt | 75b884c0cba7e1aac15b30f2d0d803597328a208 | ["MIT"] | null | null | null
xrt/backends/raycing/materials.py | adinatan/xrt | 75b884c0cba7e1aac15b30f2d0d803597328a208 | ["MIT"] | null | null | null
xrt/backends/raycing/materials.py | adinatan/xrt | 75b884c0cba7e1aac15b30f2d0d803597328a208 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Materials
---------
Module :mod:`~xrt.backends.raycing.materials` defines atomic and material
properties related to x-ray scattering, diffraction and propagation:
reflectivity, transmittivity, refractive index, absorption coefficient etc.
.. autofunction:: read_atomic_data
.. autoclass:: Element()
:members: __init__, read_f0_Kissel, get_f0, read_f1f2_vs_E, get_f1f2
.. autoclass:: Material()
:members: __init__, get_refractive_index, get_absorption_coefficient,
get_amplitude
.. autoclass:: Multilayer()
:members: __init__, get_t_thickness, get_b_thickness, get_amplitude,
get_dtheta_symmetric_Bragg
.. autoclass:: Coated()
:members: __init__
.. autoclass:: Crystal(Material)
:members: __init__, get_Darwin_width, get_amplitude,
get_dtheta_symmetric_Bragg, get_dtheta, get_dtheta_regular,
get_refractive_correction
.. autoclass:: CrystalFcc(Crystal)
:members: get_structure_factor
.. autoclass:: CrystalDiamond(CrystalFcc)
:members: get_structure_factor
.. autoclass:: CrystalSi(CrystalDiamond)
:members: __init__, dl_l, get_a, get_Bragg_offset
.. autoclass:: CrystalFromCell(Crystal)
:members: __init__
.. autoclass:: Powder(CrystalFromCell)
:members: __init__
.. autoclass:: CrystalHarmonics(CrystalFromCell)
:members: __init__
.. autoclass:: MonoCrystal(CrystalFromCell)
:members: __init__
"""
__author__ = "Konstantin Klementiev, Roman Chernikov"
__date__ = "16 Mar 2017"
__all__ = ('Material', 'EmptyMaterial', 'Multilayer', 'GradedMultilayer',
'Coated', 'Crystal', 'CrystalFcc',
'CrystalDiamond', 'CrystalSi', 'CrystalFromCell',
'Powder', 'CrystalHarmonics', 'MonoCrystal')
import collections
__allSectioned__ = collections.OrderedDict([
('Material', None),
('Crystals', ('CrystalSi', 'CrystalDiamond', 'CrystalFcc',
'CrystalFromCell')), # don't include 'Crystal'
('Layered', ('Coated', 'Multilayer', 'GradedMultilayer')),
('Advanced', ('Powder', 'CrystalHarmonics', 'MonoCrystal', 'EmptyMaterial'))
])
import sys
import os
import time
# import struct
import pickle
import numpy as np
from scipy.special import jn as besselJn
from .. import raycing
from .physconsts import PI, PI2, CH, CHBAR, R0, AVOGADRO, SQRT2PI
try:
import pyopencl as cl # analysis:ignore
isOpenCL = True
except ImportError:
isOpenCL = False
ch = CH  # left here for compatibility
chbar = CHBAR  # left here for compatibility
try: # for Python 3 compatibility:
unicode = unicode
except NameError:
# 'unicode' is undefined, must be Python 3
unicode = str
basestring = (str, bytes)
else:
# 'unicode' exists, must be Python 2
unicode = unicode
basestring = basestring
elementsList = (
'none', 'H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne',
'Na', 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca', 'Sc', 'Ti', 'V',
'Cr', 'Mn', 'Fe', 'Co', 'Ni', 'Cu', 'Zn', 'Ga', 'Ge', 'As', 'Se', 'Br',
'Kr', 'Rb', 'Sr', 'Y', 'Zr', 'Nb', 'Mo', 'Tc', 'Ru', 'Rh', 'Pd', 'Ag',
'Cd', 'In', 'Sn', 'Sb', 'Te', 'I', 'Xe', 'Cs', 'Ba', 'La', 'Ce', 'Pr',
'Nd', 'Pm', 'Sm', 'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu',
'Hf', 'Ta', 'W', 'Re', 'Os', 'Ir', 'Pt', 'Au', 'Hg', 'Tl', 'Pb', 'Bi',
'Po', 'At', 'Rn', 'Fr', 'Ra', 'Ac', 'Th', 'Pa', 'U')
def read_atomic_data(elem):
u"""
Reads atomic data from ``AtomicData.dat`` file adopted from XOP [XOP]_.
It has the following data:
0 AtomicRadius[Å] CovalentRadius[Å] AtomicMass BoilingPoint[K]
MeltingPoint[K] Density[g/ccm] AtomicVolume
CoherentScatteringLength[1E-12cm] IncoherentX-section[barn]
Absorption@1.8Å[barn] DebyeTemperature[K] ThermalConductivity[W/cmK]
In :meth:`read_atomic_data` only the mass is inquired. The user may
extend the method to get the other values by simply adding the
corresponding array elements to the returned value."""
if isinstance(elem, basestring):
Z = elementsList.index(elem)
elif isinstance(elem, int):
Z = elem
else:
raise NameError('Wrong element')
dataDir = os.path.dirname(__file__)
with open(os.path.join(dataDir, 'data', 'AtomicData.dat')) as f:
for li in f:
fields = li.split()
if int(fields[0]) == Z:
atomicData = [float(x) for x in fields]
break
return atomicData[3]
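# Illustrative usage sketch (not part of the original module); the import
# alias 'rm' follows the convention used in the class docstrings below:
# >>> from xrt.backends.raycing import materials as rm
# >>> rm.read_atomic_data('Si')    # atomic mass of silicon, ~28.09
# >>> rm.read_atomic_data(29)      # atomic mass of copper, ~63.55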
class Element(object):
"""This class serves for accessing the scattering factors f0, f1 and f2 of
a chemical element. It can also report other atomic data listed in
``AtomicData.dat`` file adopted from XOP [XOP]_.
"""
def __init__(self, elem=None, table='Chantler'):
u"""
*elem*: str or int
The element can be specified by its name (case sensitive) or its
ordinal number.
*table*: str
This parameter is explained in the description of
:class:`Material`.
"""
if isinstance(elem, basestring):
self.name = elem
self.Z = elementsList.index(elem)
elif isinstance(elem, int):
self.name = elementsList[elem]
self.Z = elem
else:
raise NameError('Wrong element')
self.f0coeffs = self.read_f0_Kissel()
self.E, self.f1, self.f2 = self.read_f1f2_vs_E(table=table)
self.mass = read_atomic_data(self.Z)
def read_f0_Kissel(self):
r"""
Reads f0 scattering factors from the tabulation of XOP [XOP]_. These
were calculated by [Kissel]_ and then parameterized as [Waasmaier]_:
.. math::
f_0\left(\frac{q}{4\pi}\right) = c + \sum_{i=1}^5{a_i\exp\left(-b_i
\left(q/(4\pi)\right)^2\right)}
where :math:`q/(4\pi) = \sin{\theta} / \lambda` and :math:`a_i`,
:math:`b_i` and :math:`c` are the coefficients tabulated in the file
``f0_xop.dat``.
.. [Kissel] L. Kissel, Radiation physics and chemistry **59** (2000)
185-200, http://www-phys.llnl.gov/Research/scattering/RTAB.html
.. [Waasmaier] D. Waasmaier & A. Kirfel, Acta Cryst. **A51** (1995)
           416-431
"""
dataDir = os.path.dirname(__file__)
with open(os.path.join(dataDir, 'data', 'f0_xop.dat')) as f:
for li in f:
if li.startswith("#S"):
fields = li.split()
if int(fields[1]) == self.Z:
break
else:
raise ValueError('cannot find the element {0}'.format(self.Z))
for li in f:
if li.startswith("#UP"):
if sys.version_info < (3, 1):
li = f.next()
else:
li = next(f)
break
else:
raise ValueError('wrong file format!')
return [float(x) for x in li.split()]
# = [a1 a2 a3 a4 a5 c b1 b2 b3 b4 b5 ]
def get_f0(self, qOver4pi=0): # qOver4pi = sin(theta) / lambda
"""Calculates f0 for the given *qOver4pi*."""
return self.f0coeffs[5] + sum(
a * np.exp(-b * qOver4pi**2)
for a, b in zip(self.f0coeffs[:5], self.f0coeffs[6:]))
def read_f1f2_vs_E(self, table):
"""Reads f1 and f2 scattering factors from the given *table* at the
instantiation time."""
dataDir = os.path.dirname(__file__)
# pname = os.path.join(dataDir, 'data', table+'.pickle')
# with open(pname, 'rb') as f:
# res = pickle.load(f, encoding='bytes') if isPython3 else\
# pickle.load(f)
# return res[self.Z]
table_fn = table.split()[0]
pname = os.path.join(dataDir, 'data', table_fn+'.npz')
f2key = '_f2tot' if 'total' in table else '_f2'
with open(pname, 'rb') as f:
res = np.load(f)
ef1f2 = (np.array(res[self.name+'_E']),
np.array(res[self.name+'_f1']),
np.array(res[self.name+f2key]))
return ef1f2
# pname = os.path.join(dataDir, 'data', table+'.Ef')
# E, f1, f2 = [], [], []
# startFound = False
# with open(pname, "rb") as f:
# while True:
# structEf1f2 = f.read(12)
# if not structEf1f2:
# break
# ELoc, f1Loc, f2Loc = struct.unpack_from("<3f", structEf1f2)
# if startFound and ELoc == -1:
# break
# if ELoc == -1 and f2Loc == self.Z:
# startFound = True
# continue
# if startFound:
# E.append(ELoc)
# f1.append(f1Loc - self.Z)
# f2.append(f2Loc)
# return np.array(E), np.array(f1), np.array(f2)
def get_f1f2(self, E):
"""Calculates (interpolates) f1 and f2 for the given array *E*."""
if np.any(E < self.E[0]) or np.any(E > self.E[-1]):
raise ValueError(
('E={0} is out of the data table range ' +
'[{1}, {2}]!!! Use another table.').format(
E[np.where((E < self.E[0]) | (E > self.E[-1]))], self.E[0],
self.E[-1]))
f1 = np.interp(E, self.E, self.f1)
f2 = np.interp(E, self.E, self.f2)
return f1 + 1j*f2
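# Illustrative usage sketch (not part of the original module): scattering
# factors of a single element; 9000 eV and q/(4*pi) = 0.3 1/A are arbitrary
# example values.
# >>> from xrt.backends.raycing import materials as rm
# >>> si = rm.Element('Si')          # default table='Chantler'
# >>> si.get_f0(0.3)                 # f0 at sin(theta)/lambda = 0.3 1/A
# >>> si.get_f1f2(9000.)             # complex f1 + 1j*f2 at 9 keV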
class Material(object):
"""
:class:`Material` serves for getting reflectivity, transmittivity,
refractive index and absorption coefficient of a material specified by its
chemical formula and density."""
def __init__(self, elements=None, quantities=None, kind='auto', rho=0,
t=None, table='Chantler total', efficiency=None,
efficiencyFile=None, name=''):
r"""
*elements*: str or sequence of str
Contains all the constituent elements (symbols)
*quantities*: None or sequence of floats of length of *elements*
Coefficients in the chemical formula. If None, the coefficients
are all equal to 1.
*kind*: str
One of 'mirror', 'thin mirror', 'plate', 'lens', 'grating', 'FZP'.
If 'auto', the optical element will decide which material kind to
use via its method :meth:`assign_auto_material_kind`.
*rho*: float
Density in g/cm\ :sup:`3`.
*t*: float
Thickness in mm, required only for 'thin mirror'.
*table*: str
At the time of instantiation the tabulated scattering factors of
each element are read and then interpolated at the requested **q**
value and energy. *table* can be 'Henke' (10 eV < *E* < 30 keV)
[Henke]_, 'Chantler' (11 eV < *E* < 405 keV) [Chantler]_ or 'BrCo'
(30 eV < *E* < 509 keV) [BrCo]_.
The tables of f2 factors consider only photoelectric
cross-sections. The tabulation by Chantler can optionally have
*total* absorption cross-sections. This option is enabled by
*table* = 'Chantler total'.
.. [Henke] http://henke.lbl.gov/optical_constants/asf.html
B.L. Henke, E.M. Gullikson, and J.C. Davis, *X-ray interactions:
photoabsorption, scattering, transmission, and reflection at
E=50-30000 eV, Z=1-92*, Atomic Data and Nuclear Data Tables
**54** (no.2) (1993) 181-342.
.. [Chantler] http://physics.nist.gov/PhysRefData/FFast/Text/cover.html
http://physics.nist.gov/PhysRefData/FFast/html/form.html
C. T. Chantler, *Theoretical Form Factor, Attenuation, and
Scattering Tabulation for Z = 1 - 92 from E = 1 - 10 eV to E = 0.4 -
1.0 MeV*, J. Phys. Chem. Ref. Data **24** (1995) 71-643.
.. [BrCo] http://www.bmsc.washington.edu/scatter/periodic-table.html
ftp://ftpa.aps.anl.gov/pub/cross-section_codes/
S. Brennan and P.L. Cowan, *A suite of programs for calculating
x-ray absorption, reflection and diffraction performance for a
variety of materials at arbitrary wavelengths*, Rev. Sci. Instrum.
**63** (1992) 850-853.
*efficiency*: sequence of pairs [*order*, *value*]
Can be given for *kind* = 'grating' and *kind* = 'FZP'. It must
correspond to the field *order* of the OE. It can be given as a
constant per diffraction order or as an energy dependence, also per
diffraction order. It is a sequence of pairs [*order*, *value*],
where *value* is either the efficiency itself or an index in the
data file given by *efficiencyFile*. The data file can either be
(1) a pickle file with *energy* and *efficiency* arrays as two
first dump elements and *efficiency* shape as (len(*energy*),
*orders*) or (2) a column file with energy in the leftmost column
and the order efficiencies in the next columns. The *value* is a
corresponding array index (zero-based) or a column number (also
zero-based, the 0th column is energy). An example of the efficiency
calculation can be found in
``\examples\withRaycing\11_Wave\waveGrating.py``.
*efficiencyFile*: str
See the definition of *efficiency*.
*name*: str
Material name. Not used by xrt. Can be used by the user for
annotations of graphs or other output purposes. If empty, the name
is constructed from the *elements* and the *quantities*.
"""
if isinstance(elements, basestring):
elements = elements,
if quantities is None:
self.quantities = [1. for elem in elements]
else:
self.quantities = quantities
self.elements = []
self.mass = 0.
if name:
self.name = name
autoName = False
else:
self.name = r''
autoName = True
for elem, xi in zip(elements, self.quantities):
newElement = Element(elem, table)
self.elements.append(newElement)
self.mass += xi * newElement.mass
if autoName:
self.name += elem
if xi != 1:
self.name += '$_{' + '{0}'.format(xi) + '}$'
self.kind = kind # 'mirror', 'thin mirror', 'plate', 'lens'
if self.kind == 'thin mirror':
if t is None:
raise ValueError('Give the thin mirror a thickness!')
self.t = t # t in mm
else:
self.t = t
self.rho = rho # density g/cm^3
self.geom = ''
self.efficiency = efficiency
self.efficiencyFile = efficiencyFile
if efficiencyFile is not None:
self.read_efficiency_file()
def read_efficiency_file(self):
cols = [c[1] for c in self.efficiency]
if self.efficiencyFile.endswith('.pickle'):
with open(self.efficiencyFile, 'rb') as f:
res = pickle.load(f)
es, eff = res[0], res[1].T[cols, :]
else:
es = np.loadtxt(self.efficiencyFile, usecols=(0,), unpack=True)
eff = (np.loadtxt(self.efficiencyFile, usecols=cols,
unpack=True)).reshape(len(cols), -1)
self.efficiency_E = es
self.efficiency_I = eff
def get_refractive_index(self, E):
r"""
Calculates refractive index at given *E*. *E* can be an array.
.. math::
n = 1 - \frac{r_0\lambda^2 N_A \rho}{2\pi M}\sum_i{x_i f_i(0)}
where :math:`r_0` is the classical electron radius, :math:`\lambda` is
the wavelength, :math:`N_A` is Avogadro’s number, :math:`\rho` is the
material density, *M* is molar mass, :math:`x_i` are atomic
concentrations (coefficients in the chemical formula) and
:math:`f_i(0)` are the complex atomic scattering factor for the forward
scattering.
"""
xf = np.zeros_like(E) * 0j
for elem, xi in zip(self.elements, self.quantities):
xf += (elem.Z + elem.get_f1f2(E)) * xi
return 1 - 1e-24 * AVOGADRO * R0 / PI2 * (CH/E)**2 * self.rho * \
xf / self.mass # 1e-24 = A^3/cm^3
def get_absorption_coefficient(self, E): # mu0
r"""
Calculates the linear absorption coefficient from the imaginary part of
refractive index. *E* can be an array. The result is in cm\ :sup:`-1`.
.. math::
            \mu = 2\Im(n) E / (c\hbar) = 4\pi\Im(n)/\lambda.
"""
return abs((self.get_refractive_index(E)).imag) * E / CHBAR * 2e8
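    # Illustrative usage sketch (not part of the original module): the complex
    # refractive index n = 1 - delta + i*beta and the attenuation length 1/mu
    # of silicon at 9 keV; the density 2.33 g/cm^3 is an assumed literature
    # value.
    # >>> from xrt.backends.raycing import materials as rm
    # >>> mSi = rm.Material('Si', rho=2.33)
    # >>> n = mSi.get_refractive_index(9000.)
    # >>> delta, beta = 1 - n.real, abs(n.imag)
    # >>> attLength = 1. / mSi.get_absorption_coefficient(9000.)   # in cm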
def get_grating_efficiency(self, beam, good):
"""Gets grating efficiency from the parameters *efficiency* and
*efficiencyFile* supplied at the instantiation."""
resI = np.zeros(good.sum())
order = beam.order[good]
if self.efficiencyFile is None:
for eff in self.efficiency:
resI[order == eff[0]] = eff[1]
else:
E = beam.E[good]
Emin = self.efficiency_E[0]
Emax = self.efficiency_E[-1]
if (np.any(E < Emin) or np.any(E > Emax)):
raise ValueError(
('E={0} is out of the efficiency table range ' +
'[{1}, {2}]!!! Use another table.').format(
E[np.where((E < Emin) | (E > Emax))], Emin, Emax))
for ieff, eff in enumerate(self.efficiency):
resI[order == eff[0]] = np.interp(
E[order == eff[0]], self.efficiency_E,
self.efficiency_I[ieff])
resA = resI**0.5
return resA, resA, 0
def get_amplitude(self, E, beamInDotNormal, fromVacuum=True):
r"""
Calculates amplitude of reflectivity (for 'mirror' and 'thin mirror')
or transmittivity (for 'plate' and 'lens') [wikiFresnelEq]_,
[Als-Nielsen]_. *E* is energy, *beamInDotNormal* is cosine of the angle
between the incoming beam and the normal (:math:`\theta_1` below), both
can be scalars or arrays. The interface of the material is assumed to
be with vacuum; the direction is given by boolean *fromVacuum*. Returns
a tuple of the amplitudes of s and p polarizations and the absorption
coefficient in cm\ :sup:`-1`.
.. math::
r_s^{\rm mirror} &= \frac{n_1\cos{\theta_1} - n_2\cos{\theta_2}}
{n_1\cos{\theta_1} + n_2\cos{\theta_2}}\\
r_p^{\rm mirror} &= \frac{n_2\cos{\theta_1} - n_1\cos{\theta_2}}
{n_2\cos{\theta_1} + n_1\cos{\theta_2}}\\
r_{s,p}^{\rm thin\ mirror} &= r_{s,p}^{\rm mirror}\frac{1 - p^2}
{1 - (r_{s,p}^{\rm mirror})^2p^2},
where the phase factor
:math:`p^2 = \exp(2iEtn_2\cos{\theta_2}/c\hbar)`.
.. math::
t_s^{\rm plate,\ lens} &= 2\frac{n_1\cos{\theta_1}}
{n_1\cos{\theta_1} + n_2\cos{\theta_2}}t_f\\
t_p^{\rm plate,\ lens} &= 2\frac{n_1\cos{\theta_1}}
{n_2\cos{\theta_1} + n_1\cos{\theta_2}}t_f\\
where :math:`t_f = \sqrt{\frac{\Re(n_2n_1)\cos{\theta_2}}
            {\cos{\theta_1}}}/|n_1|`.
.. [wikiFresnelEq] http://en.wikipedia.org/wiki/Fresnel_equations .
.. [Als-Nielsen] Jens Als-Nielsen, Des McMorrow, *Elements of Modern
X-ray Physics*, John Wiley and Sons, 2001.
"""
        # in case `assign_auto_material_kind` hasn't happened before, which
        # can be the case e.g. in a calculator where materials are used
        # without OEs:
if self.kind == 'auto':
self.kind = 'mirror' # used to be the default kind before xrtQook
# if self.kind in ('grating', 'FZP'):
        if self.kind in ('FZP',):
return 1, 1, 0
n = self.get_refractive_index(E)
if fromVacuum:
n1 = 1.
n2 = n
else:
n1 = n
n2 = 1.
cosAlpha = abs(beamInDotNormal)
sinAlpha2 = 1 - beamInDotNormal**2
if isinstance(sinAlpha2, np.ndarray):
sinAlpha2[sinAlpha2 < 0] = 0
n1cosAlpha = n1 * cosAlpha
# cosBeta = np.sqrt(1 - (n1.real/n2.real*sinAlpha)**2)
cosBeta = np.sqrt(1 - (n1/n2)**2*sinAlpha2)
n2cosBeta = n2 * cosBeta
if self.kind in ('mirror', 'thin mirror', 'grating'): # reflectivity
rs = (n1cosAlpha - n2cosBeta) / (n1cosAlpha + n2cosBeta)
rp = (n2*cosAlpha - n1*cosBeta) / (n2*cosAlpha + n1*cosBeta)
if self.kind == 'thin mirror':
p2 = np.exp(2j * E / CHBAR * n2cosBeta * self.t * 1e7)
rs *= (1 - p2) / (1 - rs**2*p2)
rp *= (1 - p2) / (1 - rp**2*p2)
elif self.kind in ('plate', 'lens', 'FZP'): # transmittivity
tf = np.sqrt(
(n2cosBeta * n1.conjugate()).real / cosAlpha) / abs(n1)
rs = 2 * n1cosAlpha / (n1cosAlpha + n2cosBeta) * tf
rp = 2 * n1cosAlpha / (n2*cosAlpha + n1*cosBeta) * tf
else:
raise ValueError('Unknown kind of material for {0}'.format(
self.name))
# return rs, rp, abs(n.imag) * E / CHBAR * 2e8 # 1/cm
return (rs, rp,
abs(n.imag) * E / CHBAR * 2e8, # 1/cm
n.real * E / CHBAR * 1e8)
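# Illustrative usage sketch (not part of the original module): reflectivity of
# a rhodium mirror at a fixed grazing angle of 4 mrad; the density
# 12.41 g/cm^3 and the angle are assumed example values. *beamInDotNormal* is
# the cosine of the angle to the surface normal, i.e. -sin(grazing angle) for
# the incoming beam.
# >>> import numpy as np
# >>> from xrt.backends.raycing import materials as rm
# >>> mRh = rm.Material('Rh', rho=12.41, kind='mirror')
# >>> E = np.linspace(2000., 20000., 181)
# >>> rs, rp = mRh.get_amplitude(E, -np.sin(4e-3))[0:2]
# >>> Rs = abs(rs)**2               # intensity reflectivity, s-polarization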
class EmptyMaterial(object):
"""
This class provides an empty (i.e. without reflectivity) 'grating'
material. For other kinds of empty materials just use None.
"""
def __init__(self, kind='grating'):
self.kind = kind
self.geom = ''
class Multilayer(object):
u"""
:class:`Multilayer` serves for getting reflectivity of a multilayer. The
multilayer may have variable thicknesses of the two alternating layers as
functions of local *x* and *y* and/or as a function of the layer number.
"""
hiddenParams = ['power', 'substRoughness', 'tThicknessLow',
'bThicknessLow']
def __init__(self, tLayer=None, tThickness=0., bLayer=None, bThickness=0.,
nPairs=0., substrate=None, tThicknessLow=0., bThicknessLow=0.,
idThickness=0., power=2., substRoughness=0, name=''):
u"""
*tLayer*, *bLayer*, *substrate*: instance of :class:`Material`
The top layer material, the bottom layer material and the substrate
material.
*tThickness* and *bThickness*: float
The thicknesses of the layers in Å. If the multilayer is depth
graded, *tThickness* and *bThickness* are at the top and
*tThicknessLow* and *bThicknessLow* are at the substrate. If you
need laterally graded thicknesses, modify `get_t_thickness` and/or
`get_b_thickness` in a subclass.
*power*: float
Defines the exponent of the layer thickness power law, if the
multilayer is depth graded:
.. math::
d_n = A / (B + n)^{power}.
*tThicknessLow* and *bThicknessLow*: float
Are ignored (left as zeros) if not depth graded.
*nPairs*: int
The number of layer pairs.
*idThickness*: float
RMS thickness :math:`\\sigma_{j,j-1}` of the
interdiffusion/roughness interface in Å.
"""
self.tLayer = tLayer
self.tThicknessHigh = float(tThickness) # in Å
self.tThicknessLow = float(tThicknessLow) # in Å
self.bLayer = bLayer
self.bThicknessHigh = float(bThickness)
self.bThicknessLow = float(bThicknessLow) # in Å
self.nPairs = nPairs
self.substrate = substrate
self.d = float(tThickness + bThickness)
# self.tb = tThicknessTop/self.d
# self.dLow = float(tThicknessLow + bThicknessLow)
self.kind = 'multilayer'
self.geom = 'Bragg reflected'
self.idThickness = idThickness
self.subRough = substRoughness
if name:
self.name = name
else:
self.name = ''
layers = np.arange(1, nPairs+1)
if tThicknessLow:
tqRoot = (self.tThicknessHigh/self.tThicknessLow)**(1./power)
tqB = (nPairs-tqRoot) / (tqRoot-1.)
tqA = self.tThicknessHigh * (tqB+1)**power
self.dti = tqA * (tqB+layers)**(-power)
else:
self.dti = np.ones(self.nPairs) * float(tThickness)
# self.dti = np.array([float(tThickness)] * self.nPairs)
if bThicknessLow:
bqRoot = (self.bThicknessHigh/self.bThicknessLow)**(1./power)
bqB = (nPairs-bqRoot) / (bqRoot-1.)
bqA = self.bThicknessHigh * (bqB+1)**power
self.dbi = bqA * (bqB+layers)**(-power)
else:
self.dbi = np.ones(self.nPairs) * float(bThickness)
# self.dbi = np.array([float(bThickness)] * self.nPairs)
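    # Illustrative construction sketch (not part of the original module): a
    # periodic W/Si multilayer and a depth-graded one that follows the power
    # law d_n = A/(B+n)^power; densities and thicknesses are assumed example
    # values.
    # >>> from xrt.backends.raycing import materials as rm
    # >>> mW = rm.Material('W', rho=19.3)
    # >>> mSi = rm.Material('Si', rho=2.33)
    # >>> mlPeriodic = rm.Multilayer(mW, 27., mSi, 18., 40, mSi)
    # >>> mlGraded = rm.Multilayer(mW, 27., mSi, 18., 40, mSi,
    # ...                          tThicknessLow=12., bThicknessLow=9.)
    # >>> mlGraded.dti[:3], mlGraded.dti[-3:]   # top-layer thicknesses in A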
def get_sin_Bragg_angle(self, E, order=1):
"""ensures that -1 <= sin(theta) <= 1"""
a = order * CH / (2*self.d*E)
try:
a[a > 1] = 1 - 1e-16
a[a < -1] = -1 + 1e-16
except TypeError:
if a > 1:
a = 1 - 1e-16
elif a < -1:
a = -1 + 1e-16
return a
def get_Bragg_angle(self, E, order=1):
a = self.get_sin_Bragg_angle(E, order)
return np.arcsin(a)
def get_dtheta(self, E, order=1):
return self.get_dtheta_symmetric_Bragg(E, order=order)
def get_dtheta_symmetric_Bragg(self, E, order=1):
r"""
The angle correction for the symmetric Bragg case:
.. math::
\delta\theta = \theta_B - \arcsin(\sqrt{m^2\lambda^2 + 8 d^2
\overline\delta} / 2d),
where :math:`\overline\delta` is the period-averaged real part of the
refractive index.
"""
nt = self.tLayer.get_refractive_index(E).real if self.tLayer else 1.
nb = self.bLayer.get_refractive_index(E).real if self.bLayer else 1.
tThickness = self.tThicknessHigh
bThickness = self.bThicknessHigh
d_ = abs((nt-1) * tThickness + (nb-1) * bThickness) / self.d
return self.get_Bragg_angle(E) - np.arcsin(
((order * CH / E)**2 + self.d**2 * 8*d_)**0.5 / (2*self.d))
def get_t_thickness(self, x, y, iPair):
u"""
The top (the upper in the period pair) layer thickness in Å as a
function of local coordinates *x* and *y* and the index (zero at
vacuum) of the period pair.
For parametric surfaces, the *x* and *y* local coordinates are assumed
to be *s* and *phi* of the parametric representation."""
f = 1.
# f = np.random.normal(size=len(x))*self.tError + 1 if self.tError else 1
return self.dti[iPair] * f
def get_b_thickness(self, x, y, iPair):
u"""
The bottom (the lower in the period pair) layer thickness in Å as a
function of local coordinates *x* and *y* and the index (zero at
vacuum) of the period pair.
For parametric surfaces, the *x* and *y* local coordinates are assumed
to be *s* and *phi* of the parametric representation."""
f = 1.
# f = np.random.normal(size=len(x))*self.tError + 1 if self.tError else 1
return self.dbi[iPair] * f
def get_amplitude(self, E, beamInDotNormal, x=None, y=None, ucl=None):
r"""
Calculates amplitude of reflectivity [Als-Nielsen]_. *E* is energy,
*beamInDotNormal* is cosine of the angle between the incoming beam and
the normal (:math:`\theta_0` below), both can be scalars or arrays. The
top interface of the multilayer is assumed to be with vacuum. Returns a
tuple of the amplitudes of s and p polarizations.
The calculation starts from the bottommost layer (with index
:math:`N`). The reflectivity from its top into the adjacent layer
(:math:`N-1`) is:
.. math::
R_N = \frac{r_{N-1, N} + r_{N, N+1} p_N^2}
{1 + r_{N-1, N} r_{N, N+1} p_N^2},
where the capital :math:`R` denotes the net reflectivity of the layer
and the small letters :math:`r` denote the interface reflectivity
(Fresnel equations):
.. math::
r_{j, j+1} = \frac{Q_j - Q_{j+1}}{Q_j + Q_{j+1}},
here :math:`N+1` refers to the substrate material and
.. math::
Q_j = \sqrt{Q^2 - 8k^2\delta_j + i8k^2\beta_j},
Q = 2k\sin{\theta_0}
and :math:`\delta_j` and :math:`\beta_j` are parts of the refractive
index :math:`n_j = 1 - \delta_j + i\beta_j`. The phase factor
:math:`p_j^2` is :math:`\exp(i\Delta_j Q_j)`, :math:`\Delta_j` being
the layer thickness. The calculation proceeds recursively upwards by
layers as
.. math::
R_j = \frac{r_{j-1, j} + R_{j+1} p_j^2}
{1 + r_{j-1, j} R_{j+1} p_j^2},
until :math:`R_1` is reached, where the 0th layer is vacuum and
:math:`Q_0 = Q`.
If the interdiffusion thickness is not zero,
the reflectivity at each interface is attenuated by a factor
        of :math:`\exp(-2k_{j,z}k_{j-1,z}\sigma^{2}_{j,j-1})`,
where :math:`k_{j,z}` is longitudinal component of the wave vector
in j-th layer [Nevot-Croce]_.
The above formulas refer to *s* polarization. The *p* part differs at
the interface:
.. math::
r^p_{j, j+1} = \frac{Q_j\frac{n_{j+1}}{n_j} -
Q_{j+1}\frac{n_{j}}{n_{j+1}}}{Q_j\frac{n_{j+1}}{n_j} +
Q_{j+1}\frac{n_{j}}{n_{j+1}}}
and thus the *p* polarization part requires a separate recursive
chain.
.. [Nevot-Croce] L. Nevot and P. Croce, Rev. Phys. Appl. **15**,
(1980) 761
"""
k = E / CHBAR
nt = self.tLayer.get_refractive_index(E).conjugate() if self.tLayer else 1. # analysis:ignore
nb = self.bLayer.get_refractive_index(E).conjugate() if self.bLayer else 1. # analysis:ignore
ns = self.substrate.get_refractive_index(E).conjugate() if self.substrate else 1. # analysis:ignore
Q = 2 * k * abs(beamInDotNormal)
Q2 = Q**2
k28 = 8 * k**2
Qt = (Q2 + (nt-1)*k28)**0.5
Qb = (Q2 + (nb-1)*k28)**0.5
Qs = (Q2 + (ns-1)*k28)**0.5
id2 = self.idThickness**2
roughtb = np.exp(-0.5 * Qt * Qb * id2)
rtb_s = np.complex128((Qt-Qb) / (Qt+Qb) * roughtb)
rtb_p = np.complex128((Qt/nt*nb - Qb/nb*nt) / (Qt/nt*nb + Qb/nb*nt) *
roughtb)
rbt_s = -rtb_s
rbt_p = -rtb_p
roughvt = np.exp(-0.5 * Q * Qt * id2)
rvt_s = np.complex128((Q-Qt) / (Q+Qt) * roughvt)
rvt_p = np.complex128((Q*nt - Qt/nt) / (Q*nt + Qt/nt) * roughvt)
rmsbs = id2 if self.tLayer else self.subRough**2
roughbs = np.exp(-0.5 * Qb * Qs * rmsbs)
rbs_s = np.complex128((Qb-Qs) / (Qb+Qs) * roughbs)
rbs_p = np.complex128((Qb/nb*ns - Qs/ns*nb) / (Qb/nb*ns + Qs/ns*nb) *
roughbs)
rj_s, rj_p = rbs_s, rbs_p # bottom layer to substrate
ri_s = np.zeros_like(rj_s)
ri_p = np.zeros_like(rj_p)
t0 = time.time()
if ucl is None:
for i in reversed(range(2*self.nPairs)):
if i % 2 == 0:
if i == 0: # topmost layer
rij_s, rij_p = rvt_s, rvt_p
else:
rij_s, rij_p = rbt_s, rbt_p
p2i = np.complex128(
np.exp(1j*Qt*self.get_t_thickness(x, y, i//2)))
else:
rij_s, rij_p = rtb_s, rtb_p
p2i = np.complex128(
np.exp(1j*Qb*self.get_b_thickness(x, y, i//2)))
ri_s = (rij_s + rj_s*p2i) / (1 + rij_s*rj_s*p2i)
ri_p = (rij_p + rj_p*p2i) / (1 + rij_p*rj_p*p2i)
rj_s, rj_p = ri_s, ri_p
t2 = time.time()
if raycing._VERBOSITY_ > 10:
print('ML reflection calculated with CPU in {} s'.format(
t2-t0))
else:
scalarArgs = [np.int32(self.nPairs)]
slicedROArgs = [rbs_s, rbs_p,
rtb_s, rtb_p,
rvt_s, rvt_p,
Qt, Qb]
nonSlicedROArgs = [np.float64(self.dti), np.float64(self.dbi)]
slicedRWArgs = [ri_s,
ri_p]
try:
iterator = iter(E)
except TypeError: # not iterable
E *= np.ones_like(beamInDotNormal)
try:
iterator = iter(beamInDotNormal) # analysis:ignore
except TypeError: # not iterable
beamInDotNormal *= np.ones_like(E)
ri_s, ri_p = ucl.run_parallel(
'get_amplitude_graded_multilayer', scalarArgs, slicedROArgs,
nonSlicedROArgs, slicedRWArgs, None, len(E))
t2 = time.time()
if raycing._VERBOSITY_ > 10:
print('ML reflection calculated with OCL in {} s'.format(
t2-t0))
return ri_s, ri_p
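# Illustrative usage sketch (not part of the original module): reflectivity of
# a periodic W/Si multilayer around its 1st Bragg peak at 9 keV, using the
# CPU branch (ucl=None); all numerical values are assumed examples.
# >>> import numpy as np
# >>> from xrt.backends.raycing import materials as rm
# >>> mW, mSi = rm.Material('W', rho=19.3), rm.Material('Si', rho=2.33)
# >>> ml = rm.Multilayer(mW, 27., mSi, 18., 40, mSi)
# >>> E = 9000.
# >>> theta = ml.get_Bragg_angle(E) + np.linspace(-2e-3, 2e-3, 401)
# >>> rs, rp = ml.get_amplitude(E, -np.sin(theta))
# >>> Rs = abs(rs)**2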
class GradedMultilayer(Multilayer):
"""
    Derivative class from :class:`Multilayer` with graded layer thicknesses.
"""
hiddenParams = ['substRoughness']
class Coated(Multilayer):
"""
    Derivative class from :class:`Multilayer` with a single reflective layer on
a substrate.
"""
hiddenParams = ['tLayer', 'tThickness', 'bLayer', 'bThickness', 'power',
'tThicknessLow', 'bThicknessLow', 'idThickness',
'thicknessError', 'nPairs']
def __init__(self, *args, **kwargs):
u"""
*coating*, *substrate*: instance of :class:`Material`
Material of the mirror coating layer, and the substrate material.
*cThickness*: float
The thicknesses of mirror coating in Å.
        *surfaceRoughness*: float
            RMS roughness of the mirror surface in Å.
        *substRoughness*: float
            RMS roughness of the mirror substrate in Å.
"""
coating = kwargs.pop('coating', None)
cThickness = kwargs.pop('cThickness', 0)
surfaceRoughness = kwargs.pop('surfaceRoughness', 0)
super(Coated, self).__init__(
bLayer=coating, bThickness=cThickness,
idThickness=surfaceRoughness, nPairs=1, *args, **kwargs)
self.kind = 'mirror'
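# Illustrative usage sketch (not part of the original module): a Rh-coated
# silicon mirror; the coating thickness and roughnesses are assumed example
# values.
# >>> import numpy as np
# >>> from xrt.backends.raycing import materials as rm
# >>> mRh = rm.Material('Rh', rho=12.41)
# >>> mSi = rm.Material('Si', rho=2.33)
# >>> coated = rm.Coated(coating=mRh, cThickness=300., surfaceRoughness=3.,
# ...                    substrate=mSi, substRoughness=3., name='Rh coated')
# >>> rs, rp = coated.get_amplitude(9000., -np.sin(4e-3))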
class Crystal(Material):
u"""The parent class for crystals. The descendants must define
:meth:`get_structure_factor`. :class:`Crystal` gives reflectivity and
transmittivity of a crystal in Bragg and Laue cases."""
hiddenParams = ['nuPoisson', 'calcBorrmann', 'useTT']
def __init__(self, hkl=[1, 1, 1], d=0, V=None, elements='Si',
quantities=None, rho=0, t=None, factDW=1.,
geom='Bragg reflected', table='Chantler', name='',
nuPoisson=0., calcBorrmann=None, useTT=False, mosaicity=0):
u"""
*hkl*: sequence
hkl indices.
*d*: float
Interatomic spacing in Å.
*V*: float
Unit cell volume in Å\ :sup:`3`. If not given, is calculated from
*d* assuming a cubic symmetry.
*factDW*: float
Debye-Waller factor applied to the structure factor.
*geom*: str
The 1st word is either 'Bragg' or 'Laue', the 2nd word is either
'transmitted' or 'reflected' or 'Fresnel' (the optical element must
then provide `local_g` method that gives the grating vector).
*table*: str
This parameter is explained in the description of the parent class
:class:`Material`.
*nuPoisson*: float
Poisson's ratio. Used to calculate the properties of bent crystals.
*calcBorrmann*: str
Controls the origin of the ray leaving the crystal. Can be 'None',
'uniform', 'Bessel' or 'TT'. If 'None', the point of reflection
is located on the surface of incidence. In all other cases the
coordinate of the exit point is sampled according to the
corresponding distribution: 'uniform' is a fast approximation for
            thick crystals, 'Bessel' is the exact solution for flat crystals,
            'TT' is the exact solution of the Takagi-Taupin equations for bent
            and flat crystals ('TT' requires *targetOpenCL* in the Optical
            Element to be
not 'None' and *useTT* in the :class:`Crystal` to be 'True'. Not
recommended for crystals thicker than 100 µm due to heavy
computational load).
*useTT*: bool
            Specifies whether the reflectivity will be calculated by analytical
formula or by solution of the Takagi-Taupin equations (so far only
for the Laue geometry). Must be set to 'True' in order to calculate
the reflectivity of bent crystals.
*mosaicity*: float, radians
The sigma of the normal distribution of the crystallite normals.
xrt follows the concept of mosaic crystals from
[SanchezDelRioMosaic]_. This concept has three main parts: (i) a
random distribution of the crystallite normals results in a
distribution in the reflected directions, (ii) the secondary
extinction results in a mean free path distribution in the new ray
origins and (iii) the reflectivity is calculated following the work
[BaconLowde]_.
.. note::
The mosaicity is assumed large compared with the Darwin width.
Therefore, there is no continuous transition mosaic-to-perfect
crystal at a continuously reduced mosaicity parameter.
See the tests :ref:`here <tests_mosaic>`.
.. [SanchezDelRioMosaic] M. Sánchez del Río et al.,
Rev. Sci. Instrum. 63 (1992) 932.
.. [BaconLowde] G. E. Bacon and R. D. Lowde,
Acta Crystallogr. 1, (1948) 303.
"""
super(Crystal, self).__init__(
elements, quantities, rho=rho, table=table, name=name)
self.hkl = hkl
self.sqrthkl2 = (sum(i**2 for i in hkl))**0.5
self.d = d
if V is None:
V = (d * self.sqrthkl2)**3
self.V = V
self.chiToF = -R0 / PI / self.V # minus!
self.chiToFd2 = abs(self.chiToF) * self.d**2
self.geom = geom
self.geometry = 2*int(geom.startswith('Bragg')) +\
int(geom.endswith('transmitted'))
self.factDW = factDW
self.kind = 'crystal'
self.t = t # in mm
self.nuPoisson = nuPoisson
self.calcBorrmann = calcBorrmann
self.useTT = useTT
self.mosaicity = mosaicity
# def get_amplitude_Authie(self, E, gamma0, gammah, beamInDotHNormal):
# """A. Authier, Dynamical Theory of X-ray Diffraction -1. Perfect
# Crystals, in X-ray and Neutron Dynamical Diffraction: Theory and
# Applications, ed. A. Authier, S. Lagomarsino & B. K. Tanner, NATO ASI
# Ser., Ser. B: Physics 357 (1996) 1–32, Plenum Press: New York and
# London."""
# def _dynamical_theory_Bragg():
# rx = np.sqrt(eta**2 - 1)
# if self.t is not None:
# arg = self.t * 1e7 * rx * math.pi/ lambdaExt
# if self.geom.endswith('transmitted'):
# mu0 = -twoPi / waveLength * chi0.imag
# att = np.exp(-mu0 / 4 * (-1. / gamma0 - 1. / gammah) *
# self.t)
# ta = att / (np.cos(arg) + 1j * eta * np.sin(arg) / rx)
# return ta
# eps = 1.0j / np.tan (arg)
# else:
# eps = 1.
# ra = 1. / (eta - rx * eps)
# rb = 1. / (eta + rx * eps)
# indB = np.where(abs(rb) < abs(ra))
# ra[indB] = rb[indB]
# return ra
# def _dynamical_theory_Laue():
# rx = np.sqrt(eta**2 + 1)
# mu0 = -twoPi / waveLength * chi0.imag
# t = self.t * 1e7
# att = np.exp(-mu0 / 4 * (-1. / gamma0 - 1. / gammah) * t)
# arg = t * rx * math.pi / lambdaExt
# if self.geom.endswith('transmitted'):
# ta = att * (np.cos(arg) + 1j * eta * np.sin(arg) / rx)
# return ta
# ra = abs(chih / chih_) * att * np.sin(arg) / rx
# return ra
# if self.geom.startswith('Bragg'):
# _dynamical_theory = _dynamical_theory_Bragg
# else:
# _dynamical_theory = _dynamical_theory_Laue
# waveLength = ch / E#the word "lambda" is reserved
# sinThetaOverLambda = abs(beamInDotHNormal / waveLength)
# F0, Fhkl, Fhkl_ = self.get_structure_factor(E, sinThetaOverLambda)
# lambdaSquare = waveLength ** 2
# chiToFlambdaSquare = self.chiToF * lambdaSquare
# chi0 = F0 * chiToFlambdaSquare
# chih = Fhkl * chiToFlambdaSquare
# chih_ = Fhkl_ * chiToFlambdaSquare
# gamma = gammah / gamma0# asymmetry parameter = 1/b
# theta = np.arcsin(abs(beamInDotHNormal))
# sin2theta = np.sin(2. * theta)
# cos2theta = np.cos(2. * theta)
# theta0 = np.arcsin(ch / (2 * self.d * E))
# dtheta0 = - chi0 * (1 - gamma) / 2 / sin2theta
# delta = np.sqrt(abs(gamma) * chih * chih_)/ sin2theta
# if self.t is not None:
# lambdaExt = waveLength * abs(gammah) / (delta * sin2theta)
# else:
# lambdaExt = None
# eta = (theta - theta0 - dtheta0) / delta
# # s polarization:
# resS = _dynamical_theory()
# # p polarization:
# eta /= cos2theta
# if self.t is not None:
# lambdaExt /= cos2theta
# resP = _dynamical_theory()
# return resS, resP
def get_F_chi(self, E, sinThetaOverLambda):
F0, Fhkl, Fhkl_ = self.get_structure_factor(E, sinThetaOverLambda)
waveLength = CH / E
lambdaSquare = waveLength**2
chiToFlambdaSquare = self.chiToF * lambdaSquare
# notice conjugate() needed for the formulas of Belyakov & Dmitrienko!!!
chi0 = F0.conjugate() * chiToFlambdaSquare
chih = Fhkl.conjugate() * chiToFlambdaSquare
chih_ = Fhkl_.conjugate() * chiToFlambdaSquare
return F0, Fhkl, Fhkl_, chi0, chih, chih_
def get_Darwin_width(self, E, b=1., polarization='s'):
r"""Calculates the Darwin width as
.. math::
2\delta = |C|\sqrt{\chi_h\chi_{\overline{h}} / b}/\sin{2\theta}
"""
theta0 = self.get_Bragg_angle(E)
sin2theta = np.sin(2. * theta0)
waveLength = CH / E # the word "lambda" is reserved
sinThetaOverL = np.sin(theta0) / waveLength
F0, Fhkl, Fhkl_, chi0, chih, chih_ = self.get_F_chi(E, sinThetaOverL)
if polarization == 's':
polFactor = 1.
else:
polFactor = np.cos(2. * theta0)
return 2 * (np.sqrt((polFactor**2 * chih*chih_ / b)) / sin2theta).real
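    # Illustrative usage sketch (not part of the original module): Darwin
    # width (2*delta, in rad) of symmetric Si(111) at 9 keV.
    # >>> from xrt.backends.raycing import materials as rm
    # >>> si111 = rm.CrystalSi(hkl=(1, 1, 1))
    # >>> si111.get_Darwin_width(9000.)                    # s-polarization
    # >>> si111.get_Darwin_width(9000., polarization='p')  # p-polarization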
    def get_epsilon_h(self, E, b=1., polarization='s'):
        r"""Calculates the relative spectral width :math:`\epsilon_h` as
        (Shvyd'ko)
        .. math::
            \epsilon_h = \epsilon_h^{(s)}/\sqrt{|b|}, \quad
            \epsilon_h^{(s)} = \frac{4r_e d_h^2}{\pi V}|CF_h|.
"""
F0, Fhkl, Fhkl_, _, _, _ = self.get_F_chi(E, 0.5/self.d)
if polarization == 's':
polFactor = 1.
else:
theta0 = self.get_Bragg_angle(E)
polFactor = np.abs(np.cos(2. * theta0))
return 4 * self.chiToFd2 * polFactor * np.abs(Fhkl) / abs(b)**0.5
def get_Borrmann_out(self, goodN, oeNormal, lb, a_out, b_out, c_out,
alphaAsym=None, Rcurvmm=None, ucl=None, useTT=False):
asymmAngle = alphaAsym if alphaAsym is not None else 0
if Rcurvmm is not None:
Rcurv = Rcurvmm * 1e7
if ucl is None:
useTT = False
print('OpenCL is required for bent crystals calculations.')
print('Emulating perfect crystal.')
else:
Rcurv = np.inf
E = lb.E[goodN]
bLength = len(E)
if self.calcBorrmann.lower() in ['tt', 'bessel']:
thetaB = self.get_Bragg_angle(E)
beamOutDotNormal = a_out * oeNormal[-3] + \
b_out * oeNormal[-2] + c_out * oeNormal[-1]
beamInDotNormal = lb.a[goodN]*oeNormal[-3] +\
lb.b[goodN]*oeNormal[-2] + lb.c[goodN]*oeNormal[-1]
beamInDotHNormal = lb.a[goodN]*oeNormal[0] +\
lb.b[goodN]*oeNormal[1] + lb.c[goodN]*oeNormal[2]
waveLength = ch / E # the word "lambda" is reserved
thickness = self.t * 1e7 if self.t is not None else 0
k = PI2 / waveLength
HH = PI2 / self.d
F0, Fhkl, Fhkl_, chi0, chih, chih_ = self.get_F_chi(E, 0.5/self.d)
gamma_0h = beamInDotNormal * beamOutDotNormal
if thickness == 0:
N_layers = 10000
else:
            N_layers = int(thickness / 200.)
if N_layers < 2000:
N_layers = 2000
IhMap = np.zeros((bLength, (N_layers+1)))
for ipolFactor in [1., np.cos(2. * thetaB)]:
if useTT and self.calcBorrmann.lower() == 'tt':
k0H = abs(beamInDotHNormal) * HH * k
dtsin2tb = (HH**2/2. - k0H) / (k**2)
betah = dtsin2tb - 0.5 * chi0.conjugate()
pmod = thickness / np.abs(beamInDotNormal) / (N_layers-1)
qmod = thickness / np.abs(beamOutDotNormal) / (N_layers-1)
AA = -0.25j * k * ipolFactor * chih_.conjugate() * pmod
BB = -0.25j * k * ipolFactor * chih.conjugate() * qmod
WW = 0.5j * k * betah * qmod
VV = -0.25j * k * chi0.conjugate() * pmod
if Rcurvmm is not None:
if self.geom.startswith('Bragg'):
Wgrad = np.zeros_like(AA)
else:
Bm = np.sin(asymmAngle) *\
(1. + gamma_0h * (1. + self.nuPoisson)) /\
gamma_0h
Wgrad = -0.25j * HH * Bm * pmod * qmod / Rcurv
else:
Wgrad = np.zeros_like(AA)
D0_local = np.zeros(bLength*(N_layers+1),
dtype=np.complex128)
Dh_local = np.zeros(bLength*(N_layers+1),
dtype=np.complex128)
D0t = np.zeros(bLength*(N_layers+3), dtype=np.complex128)
Dht = np.zeros(bLength*(N_layers+3), dtype=np.complex128)
scalarArgs = [np.int32(N_layers)]
slicedROArgs = [np.complex128(Wgrad),
np.complex128(AA),
np.complex128(BB),
np.complex128(WW),
np.complex128(VV)]
nonSlicedROArgs = [D0t, Dht]
slicedRWArgs = [D0_local, Dh_local]
kernel = 'tt_laue_spherical'
if Rcurvmm is not None:
kernel += '_bent'
D0_local, Dh_local = ucl.run_parallel(
kernel, scalarArgs, slicedROArgs,
nonSlicedROArgs, slicedRWArgs, None, bLength)
if self.geom.endswith('transmitted'):
bFan = np.abs(D0_local.reshape((
bLength, (N_layers+1))))**2
else:
bFan = np.abs(Dh_local.reshape((
bLength, (N_layers+1))))**2
else:
sqrtchchm = np.sqrt(chih.conjugate()*chih_.conjugate())
exctDepth = waveLength * np.sqrt(np.abs(gamma_0h)) /\
sqrtchchm/ipolFactor
yrange = np.linspace(-1, 1, N_layers+1)
besselArgument = PI * thickness / exctDepth
bFan = np.abs(besselJn(
0, besselArgument[:, np.newaxis] *
np.sqrt(1.-np.square(yrange))))**2
IhMap += bFan
IhMax = np.max(IhMap, axis=1)
# Now sampling the position along the base of the Borrmann fan
index = np.array(range(bLength))
iLeft = index
raysLeft = bLength
totalX = np.zeros(bLength)
counter = 0
while raysLeft > 0:
counter += 1
disc = np.random.random(raysLeft)*IhMax[index]
rawRand = np.random.random(raysLeft)
xrand = rawRand * 2. - 1.
if useTT:
deltaRand, ipLeft = np.modf(rawRand * N_layers)
rndmIntensity = IhMap[index, np.int32(ipLeft)] *\
(1. - deltaRand) +\
IhMap[index, np.int32(np.ceil(rawRand * N_layers))] *\
deltaRand
else:
rndmIntensity = np.abs(besselJn(
0, besselArgument[index] *
np.sqrt(1-np.square(xrand))))**2
passed = np.where(rndmIntensity > disc)[0]
totalX[index[passed]] = xrand[passed]
iLeft = np.where(rndmIntensity <= disc)[0]
index = index[iLeft]
raysLeft = len(index)
totalX = 0.5*(totalX + 1.)
elif self.calcBorrmann == 'uniform':
totalX = np.random.random(bLength)
else: # You should never get here
totalX = 0.5*np.ones(bLength)
return totalX
def get_amplitude(self, E, beamInDotNormal, beamOutDotNormal=None,
beamInDotHNormal=None):
r"""
Calculates complex amplitude reflectivity and transmittivity for s- and
p-polarizations (:math:`\gamma = s, p`) in Bragg and Laue cases for the
crystal of thickness *L*, based upon Belyakov & Dmitrienko [BD]_:
.. math::
R_{\gamma}^{\rm Bragg} &= \chi_{\vec{H}}C_{\gamma}\left(\alpha +
i\Delta_{\gamma}\cot{l_{\gamma}}\right)^{-1}|b|^{-\frac{1}{2}}\\
T_{\gamma}^{\rm Bragg} &= \left(\cos{l{_\gamma}} - i\alpha\Delta
{_\gamma}^{-1}\sin{l_{\gamma}}\right)^{-1}
\exp{\left(i\vec{\kappa}_0^2 L
(\chi_0 - \alpha b) (2\vec{\kappa}_0\vec{s})^{-1}\right)}\\
R_{\gamma}^{\rm Laue} &= \chi_{\vec{H}}C_{\gamma}
\Delta_{\gamma}^{-1}\sin{l_{\gamma}}\exp{\left(i\vec{\kappa}_0^2 L
(\chi_0 - \alpha b) (2\vec{\kappa}_0\vec{s})^{-1}\right)}
|b|^{-\frac{1}{2}}\\
T_{\gamma}^{\rm Laue} &= \left(\cos{l_{\gamma}} + i\alpha
\Delta_{\gamma}^{-1}\sin{l_{\gamma}}\right)
\exp{\left(i\vec{\kappa}_0^2
L (\chi_0 - \alpha b) (2\vec{\kappa}_0\vec{s})^{-1}\right)}
where
.. math::
\alpha &= \frac{\vec{H}^2 + 2\vec{\kappa}_0\vec{H}}
{2\vec{\kappa}_0^2}+\frac{\chi_0(1-b)}{2b}\\
\Delta_{\gamma} &= \left(\alpha^2 +\frac{C_{\gamma}^2\chi_{\vec{H}}
\chi_{\overline{\vec{H}}}}{b}\right)^{\frac{1}{2}}\\
l_{\gamma} &= \frac{\Delta_{\gamma}\vec{\kappa}_0^2L}
{2\vec{\kappa}_{\vec{H}}\vec{s}}\\
b &= \frac{\vec{\kappa}_0\vec{s}}{\vec{\kappa}_{\vec{H}}\vec{s}}\\
C_s &= 1, \quad C_p = \cos{2\theta_B}
In the case of thick crystal in Bragg geometry:
.. math::
R_{\gamma}^{\rm Bragg} = \frac{\chi_{\vec{H}} C_{\gamma}}
{\alpha\pm\Delta_{\gamma}}|b|^{-\frac{1}{2}}
with the sign in the denominator that gives the smaller modulus of
:math:`R_\gamma`.
:math:`\chi_{\vec{H}}` is the Fourier harmonic of the x-ray
susceptibility, and :math:`\vec{H}` is the reciprocal lattice vector of
the crystal. :math:`\vec{\kappa}_0` and :math:`\vec{\kappa}_{\vec{H}}`
are the wave vectors of the direct and diffracted waves.
:math:`\chi_{\vec{H}}` is calculated as:
.. math::
\chi_{\vec{H}} = - \frac{r_0\lambda^2}{\pi V}F_{\vec{H}},
where :math:`r_e = e^2 / mc^2` is the classical radius of the electron,
:math:`\lambda` is the wavelength, *V* is the volume of the unit cell.
Notice :math:`|b|^{-\frac{1}{2}}` added to the formulas of Belyakov &
Dmitrienko in the cases of Bragg and Laue reflections. This is needed
because ray tracing deals not with wave fields but with rays and
therefore not with intensities (i.e. per cross-section) but with flux.
.. [BD] V. A. Belyakov and V. E. Dmitrienko, *Polarization phenomena in
x-ray optics*, Uspekhi Fiz. Nauk. **158** (1989) 679–721, Sov. Phys.
Usp. **32** (1989) 697–719.
"""
def for_one_polarization(polFactor):
delta = np.sqrt((alpha**2 + polFactor**2 * chih * chih_ / b))
if self.t is None: # thick Bragg
# if (alpha==np.nan).sum()>0: print('(alpha==np.nan).sum()>0!')
with np.errstate(divide='ignore'):
ra = chih * polFactor / (alpha+delta)
ad = alpha - delta
ad[ad == 0] = 1e-100
rb = chih * polFactor / ad
indB = np.where(np.isnan(ra))
ra[indB] = rb[indB]
indB = np.where(abs(rb) < abs(ra))
ra[indB] = rb[indB]
# if np.isnan(ra).sum() > 0:
# if (alpha == -delta).sum() > 0:
# print('alpha = -delta!', (alpha == -delta).sum())
# print('alpha ',alpha[alpha == -delta])
# print('delta ', delta[alpha == -delta])
# print('chih ', chih[alpha == -delta])
# print('b ', b[alpha == -delta]_
# if (alpha == delta).sum() > 0:
# print('alpha = delta!', (alpha == delta).sum())
# if np.isnan(alpha).sum() > 0:
# print('alpha contains nan!')
# if np.isnan(delta).sum() > 0:
# print('delta contains nan!')
# if np.isnan(chih).sum() > 0:
# print('chih contains nan!')
# raise ValueError('reflectivity contains nan!')
return ra / np.sqrt(abs(b))
t = self.t * 1e7
l = t * delta * k02 / 2. / kHs
if self.geom.startswith('Bragg'):
if self.geom.endswith('transmitted'):
ra = 1 / (np.cos(l) - 1j * alpha * np.sin(l) / delta) *\
np.exp(1j * k02 * t * (chi0 - alpha*b) / 2 / k0s)
else:
ra = chih * polFactor / (alpha + 1j*delta / np.tan(l))
else: # Laue
if self.geom.endswith('transmitted'):
ra = (np.cos(l) + 1j * alpha * np.sin(l) / delta) *\
np.exp(1j * k02 * t * (chi0 - alpha*b) / 2 / k0s)
else:
ra = chih * polFactor * np.sin(l) / delta *\
np.exp(1j * k02 * t * (chi0 - alpha*b) / 2 / k0s)
if not self.geom.endswith('transmitted'):
ra /= np.sqrt(abs(b))
return ra
waveLength = CH / E # the word "lambda" is reserved
k = PI2 / waveLength
k0s = -beamInDotNormal * k
if beamOutDotNormal is None:
beamOutDotNormal = -beamInDotNormal
kHs = -beamOutDotNormal * k
if beamInDotHNormal is None:
beamInDotHNormal = beamInDotNormal
HH = PI2 / self.d
k0H = abs(beamInDotHNormal) * HH * k
k02 = k**2
H2 = HH**2
kHs0 = kHs == 0
kHs[kHs0] = 1
b = k0s / kHs
b[kHs0] = -1
F0, Fhkl, Fhkl_, chi0, chih, chih_ = self.get_F_chi(E, 0.5/self.d)
thetaB = self.get_Bragg_angle(E)
alpha = (H2/2 - k0H) / k02 + chi0/2 * (1/b - 1)
curveS = for_one_polarization(1.) # s polarization
polFactor = np.cos(2. * thetaB)
curveP = for_one_polarization(polFactor) # p polarization
return curveS, curveP # , phi.real
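    # Illustrative usage sketch (not part of the original module): rocking
    # curve of symmetric Bragg Si(111) at 9 keV; the scan range is an assumed
    # example value.
    # >>> import numpy as np
    # >>> from xrt.backends.raycing import materials as rm
    # >>> si111 = rm.CrystalSi(hkl=(1, 1, 1))
    # >>> E = 9000.
    # >>> dt = si111.get_dtheta_symmetric_Bragg(E)    # negative, ~ -2e-5 rad
    # >>> theta0 = si111.get_Bragg_angle(E) - dt      # refraction-corrected
    # >>> dtheta = np.linspace(-40e-6, 40e-6, 801)
    # >>> curS, curP = si111.get_amplitude(E, -np.sin(theta0 + dtheta))
    # >>> Rs = abs(curS)**2                      # s-polarized reflectivity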
def get_amplitude_TT(self, E, beamInDotNormal, beamOutDotNormal=None,
beamInDotHNormal=None, alphaAsym=None,
Rcurvmm=None, ucl=None):
def for_one_polarization_TT(polFactor):
if thickness == 0:
pmod = 1.0e2/np.abs(beamInDotNormal)
qmod = 1.0e2/np.abs(beamOutDotNormal)
else:
pmod = thickness/np.abs(beamInDotNormal)/N_layers
qmod = thickness/np.abs(beamOutDotNormal)/N_layers
AA = -0.25j * k * polFactor * chih_.conjugate() * pmod
BB = -0.25j * k * polFactor * chih.conjugate() * qmod
WW = 0.5j * k * beta_h * qmod
VV = -0.25j * k * chi0.conjugate() * pmod
gamma_0h = beamInDotNormal * beamOutDotNormal
if Rcurvmm is not None:
if self.geom.startswith('Bragg'):
Wgrad = np.zeros_like(AA)
print("Bending in Bragg geometry is not implemented")
print("Emulating perfect crystal.")
# Bending in reflection geometry is not implemented
# if thickness == 0:
# Wgrad = -0.5 * 1j * qmod**2 *\
# (1. - beamOutDotNormal**2) * HH *\
# np.cos(alphaAsym) / Rcurv
else:
Bm = np.sin(asymmAngle) *\
(1. + gamma_0h * (1. + self.nuPoisson)) / gamma_0h
# Calculating reflectivities in Laue geometry is still experimental
# Use at your own risk
Wgrad = -0.25j * HH * Bm * pmod * qmod / Rcurv
else:
Wgrad = np.zeros_like(AA)
D0_local = np.zeros_like(AA)
Dh_local = np.zeros_like(AA)
scalarArgs = [np.int32(N_layers)]
slicedROArgs = [np.complex128(Wgrad),
np.complex128(AA),
np.complex128(BB),
np.complex128(WW),
np.complex128(VV)]
slicedRWArgs = [D0_local, Dh_local]
if self.geom.startswith('Bragg'):
D0t = np.zeros(bLength*(N_layers+1), dtype=np.complex128)
Dht = np.zeros(bLength*(N_layers+1), dtype=np.complex128)
nonSlicedROArgs = [D0t, Dht]
kernel = "tt_bragg"
else:
nonSlicedROArgs = None
kernel = "tt_laue"
if Rcurvmm is None:
kernel += '_plain'
else:
kernel += '_plain_bent'
D0_local, Dh_local = ucl.run_parallel(
kernel, scalarArgs, slicedROArgs,
nonSlicedROArgs, slicedRWArgs, None, bLength)
if self.geom.endswith('transmitted'):
ra = D0_local
else:
ra = Dh_local
if not self.geom.endswith('transmitted'):
ra /= np.sqrt(abs(beamInDotNormal/beamOutDotNormal))
return ra
asymmAngle = alphaAsym if alphaAsym is not None else 0
waveLength = CH / E # the word "lambda" is reserved
k = PI2 / waveLength
k0s = -beamInDotNormal * k
if beamOutDotNormal is None:
beamOutDotNormal = -beamInDotNormal
kHs = -beamOutDotNormal * k
if beamInDotHNormal is None:
beamInDotHNormal = beamInDotNormal
HH = PI2 / self.d
k0H = abs(beamInDotHNormal) * HH * k
k02 = k**2
H2 = HH**2
kHs0 = kHs == 0
kHs[kHs0] = 1
b = k0s / kHs
b[kHs0] = -1
F0, Fhkl, Fhkl_, chi0, chih, chih_ = self.get_F_chi(E, 0.5/self.d)
thetaB = self.get_Bragg_angle(E)
alpha = (H2/2 - k0H) / k02 + chi0/2 * (1/b - 1)
thickness = 0 if self.t is None else self.t * 1e7
if thickness == 0:
N_layers = 10000
else:
            N_layers = int(thickness / 100.)
if N_layers < 2000:
N_layers = 2000
bLength = len(E)
dtsin2tb = (H2/2. - k0H) / (k**2)
if Rcurvmm in [0, None]:
Rcurv = np.inf
else:
Rcurv = Rcurvmm * 1.0e7
beta_h = dtsin2tb - 0.5 * chi0.conjugate()
curveS = for_one_polarization_TT(1.) # s polarization
polFactor = np.cos(2. * thetaB)
curveP = for_one_polarization_TT(polFactor) # p polarization
return curveS, curveP # , phi.real
def get_amplitude_mosaic(self, E, beamInDotNormal, beamOutDotNormal=None,
beamInDotHNormal=None):
"""Based on Bacon and Lowde"""
def for_one_polarization(Q):
a = Q*w / mu
b = (1 + 2*a)**0.5
if self.t is None: # thick Bragg
return a / (1 + a + b)
A = mu*t / g0
if self.geom.startswith('Bragg'):
return a / (1 + a + b/np.tanh(A*b)) # Eq. (17)
else: # Laue
# return np.sinh(A*a) * np.exp(-A*(1+a)) # Eq. (18)
sigma = Q*w / g0
overGamma = 0.5 * (1/g0 + 1/gH)
overG = 0.5 * (1/g0 - 1/gH)
sm = (sigma**2 + mu**2*overG**2)**0.5
sGamma = sigma + mu*overGamma
# Eq. (24):
return sigma/sm * np.sinh(sm*t) * np.exp(-sGamma*t)
Qs, Qp, thetaB = self.get_kappa_Q(E)[2:5] # in cm^-1
if beamInDotHNormal is None:
beamInDotHNormal = beamInDotNormal
delta = np.arcsin(np.abs(beamInDotHNormal)) - thetaB
g0 = np.abs(beamInDotNormal)
gH = g0 if beamOutDotNormal is None else np.abs(beamOutDotNormal)
w = np.exp(-0.5*delta**2/self.mosaicity**2) / (SQRT2PI*self.mosaicity)
mu = self.get_absorption_coefficient(E) # in cm^-1
if self.geom.startswith('Bragg'):
mu *= 0.5 * (1 + g0/gH) # Eq. (23)
if self.t is not None:
t = self.t*0.1 # t is in cm
curveS = for_one_polarization(Qs)
curveP = for_one_polarization(Qp)
return curveS**0.5, curveP**0.5
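    # Illustrative usage sketch (not part of the original module): peak
    # reflectivity of a 1 mm thick mosaic graphite (002) crystal at 9 keV; the
    # cell data repeat the CrystalFromCell docstring example below, the
    # mosaicity of 0.4 deg is an assumed example value.
    # >>> import numpy as np
    # >>> from xrt.backends.raycing import materials as rm
    # >>> gr = rm.CrystalFromCell(
    # ...     'graphite', (0, 0, 2), a=2.456, c=6.696, gamma=120,
    # ...     atoms=[6]*4, atomsXYZ=[[0., 0., 0.], [0., 0., 0.5],
    # ...                            [1./3, 2./3, 0.], [2./3, 1./3, 0.5]],
    # ...     t=1., mosaicity=np.radians(0.4))
    # >>> thetaB = gr.get_Bragg_angle(9000.)
    # >>> rs, rp = gr.get_amplitude_mosaic(9000., -np.sin(thetaB))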
    def get_kappa_Q(self, E):
        """kappa: inverse extinction length;
        Q: integrated reflecting power per unit propagation path.
        Returned as a tuple (kappas, kappap, Qs, Qp, thetaB); the first four
        values are in cm^-1, thetaB is in rad."""
thetaB = self.get_Bragg_angle(E) - self.get_dtheta(E)
waveLength = CH / E
F0, Fhkl, Fhkl_, chi0, chih, chih_ = self.get_F_chi(E, 0.5/self.d)
polFactor = np.cos(2*thetaB)
# kappas = abs(chih) / waveLength * PI # or the same:
kappas = abs(Fhkl) * waveLength * R0 / self.V
Qs = kappas**2 * waveLength / np.sin(2*thetaB)
kappap = kappas * abs(polFactor)
Qp = Qs * polFactor**2 # as by Kato, note power 1 in Shadow paper
# return kappas, kappap, Qs, Qp # in Å^-1
return kappas*1e8, kappap*1e8, Qs*1e8, Qp*1e8, thetaB # in cm^-1
def get_extinction_lengths(self, E):
"""Returns a tuple of primary extinction lengths for s and p and, if
mosaicity is given, secondary extinction lengths: l1s, l1p, {l2s, l2p},
all in mm."""
kappas, kappap, Qs, Qp = self.get_kappa_Q(E)[0:4]
if self.mosaicity:
w = 1. / (SQRT2PI*self.mosaicity)
return 10./kappas, 10./kappap, 10./(w*Qs), 10./(w*Qp) # in mm
else:
return 10./kappas, 10./kappap # in mm
    def get_extinction_depth(self, E, polarization='s'):
        """Same as :meth:`get_extinction_lengths` but measured normal to the
        surface (*polarization* is accepted but currently unused)."""
        theta = self.get_Bragg_angle(E)
        res = self.get_extinction_lengths(E)
return [r * np.sin(theta) for r in res]
def get_sin_Bragg_angle(self, E, order=1):
"""ensures that -1 <= sin(theta) <= 1"""
a = order * CH / (2*self.d*E)
try:
a[a > 1] = 1 - 1e-16
a[a < -1] = -1 + 1e-16
except TypeError:
if a > 1:
a = 1 - 1e-16
elif a < -1:
a = -1 + 1e-16
return a
def get_Bragg_angle(self, E, order=1):
a = self.get_sin_Bragg_angle(E, order)
return np.arcsin(a)
def get_backscattering_energy(self):
return CH / (2*self.d)
def get_dtheta_symmetric_Bragg(self, E):
r"""
The angle correction for the symmetric Bragg case:
.. math::
\delta\theta = \chi_0 / \sin{2\theta_B}
"""
F0, Fhkl, Fhkl_ = self.get_structure_factor(E, 0.5 / self.d)
waveLength = CH / E # the word "lambda" is reserved
lambdaSquare = waveLength ** 2
chiToFlambdaSquare = self.chiToF * lambdaSquare
chi0 = F0 * chiToFlambdaSquare
thetaB = self.get_Bragg_angle(E)
return (chi0 / np.sin(2*thetaB)).real
def get_dtheta(self, E, alpha=None):
r"""
.. _get_dtheta:
The angle correction for the general asymmetric case:
.. math::
\delta\theta = \frac{\mp \gamma_0 \pm \sqrt{\gamma_0^2 \mp
(\gamma_0 - \gamma_h) \sqrt{1 - \gamma_0^2} \chi_0 /
\sin{2\theta_B}}}{\sqrt{1 - \gamma_0^2}}\\
where :math:`\gamma_0 = \sin(\theta_B + \alpha)`,
:math:`\gamma_h = \mp \sin(\theta_B - \alpha)` and the upper sign is
for Bragg and the lower sign is for Laue geometry.
"""
if alpha is None:
alpha = 0
thetaB = self.get_Bragg_angle(E)
pm = -1 if self.geom.startswith('Bragg') else 1
gamma0 = np.sin(thetaB + alpha)
gammah = pm * np.sin(thetaB - alpha)
symm_dt = self.get_dtheta_symmetric_Bragg(E)
osqg0 = np.sqrt(1. - gamma0**2)
dtheta0 = (pm*gamma0 - pm*np.sqrt(gamma0**2 +
pm*(gamma0 - gammah) * osqg0 * symm_dt)) / osqg0
return -dtheta0
def get_dtheta_regular(self, E, alpha=None):
r"""
The angle correction for the general asymmetric case in its simpler
version:
.. math::
\delta\theta = (1 - b)/2 \cdot \chi_0 / \sin{2\theta_B}\\
|b| = \sin(\theta_B + \alpha) / \sin(\theta_B - \alpha)
For the symmetric Bragg *b* = -1 and for the symmetric Laue *b* = +1.
"""
if alpha is not None:
thetaB = self.get_Bragg_angle(E)
b = np.sin(thetaB + alpha) / np.sin(thetaB - alpha)
if self.geom.startswith('Bragg'):
b *= -1
return (1 - b)/2 * self.get_dtheta_symmetric_Bragg(E)
else:
if self.geom.startswith('Bragg'):
return self.get_dtheta_symmetric_Bragg(E)
else:
return 0.
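    # Illustrative usage sketch (not part of the original module): angular
    # corrections for Si(111) at 9 keV, symmetric and with an assumed
    # asymmetry angle of 5 degrees.
    # >>> import numpy as np
    # >>> from xrt.backends.raycing import materials as rm
    # >>> si111 = rm.CrystalSi(hkl=(1, 1, 1))
    # >>> si111.get_dtheta(9000.)                          # symmetric case
    # >>> si111.get_dtheta(9000., np.radians(5.))          # asymmetric cut
    # >>> si111.get_dtheta_regular(9000., np.radians(5.))  # simplified version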
def get_refractive_correction(self, E, beamInDotNormal=None, alpha=None):
r"""
The difference in the glancing angle of incidence for incident and exit
waves, Eqs. (2.152) and (2.112) in [Shvydko_XRO]_:
.. math::
\theta_c - \theta'_c = \frac{w_H^{(s)}}{2} \left(b - \frac{1}{b}
\right) \tan{\theta_c}
.. note::
Not valid close to backscattering.
.. [Shvydko_XRO] Yu. Shvyd'ko, X-Ray Optics High-Energy-Resolution
Applications, Springer-Verlag Berlin Heidelberg, 2004.
"""
thetaB = self.get_Bragg_angle(E)
bothNone = (beamInDotNormal is None) and (alpha is None)
bothNotNone = (beamInDotNormal is not None) and (alpha is not None)
if bothNone or bothNotNone:
raise ValueError(
"one of 'beamInDotNormal' or 'alpha' must be given")
if beamInDotNormal is not None:
# beamInDotNormal[beamInDotNormal > 1] = 1 - 1e-16
alpha = np.arcsin(beamInDotNormal) - thetaB
if alpha is not None:
beamInDotNormal = np.sin(thetaB + alpha)
pm = -1 if self.geom.startswith('Bragg') else 1
beamOutDotNormal = pm * np.sin(thetaB - alpha)
b = beamInDotNormal / beamOutDotNormal
F0, _, _ = self.get_structure_factor(E, needFhkl=False)
return -self.chiToFd2 * F0.real * (b - 1/b) * np.tan(thetaB)
class CrystalFcc(Crystal):
r"""
A derivative class from :class:`Crystal` that defines the structure factor
for an fcc crystal as:
.. math::
F_{hkl}^{fcc} = f \times \left\{ \begin{array}{rl}
4 &\mbox{if $h,k,l$ are all even or all odd} \\ 0 &\mbox{ otherwise}
\end{array} \right.
"""
def get_structure_factor(self, E, sinThetaOverLambda=0, needFhkl=True):
anomalousPart = self.elements[0].get_f1f2(E)
F0 = 4 * (self.elements[0].Z+anomalousPart) * self.factDW
residue = sum(i % 2 for i in self.hkl)
if residue == 0 or residue == 3:
f0 = self.elements[0].get_f0(sinThetaOverLambda) if needFhkl else 0
Fhkl = 4 * (f0+anomalousPart) * self.factDW
else:
Fhkl = 0.
return F0, Fhkl, Fhkl
class CrystalDiamond(CrystalFcc):
r"""
A derivative class from :class:`Crystal` that defines the structure factor
for a diamond-like crystal as:
.. math::
F_{hkl}^{\rm diamond} = F_{hkl}^{fcc}\left(1 + e^{i\frac{\pi}{2}
(h + k + l)}\right).
"""
def get_structure_factor(self, E, sinThetaOverLambda=0, needFhkl=True):
diamondToFcc = 1 + np.exp(0.5j * PI * sum(self.hkl))
F0, Fhkl, Fhkl_ = super(CrystalDiamond, self).get_structure_factor(
E, sinThetaOverLambda, needFhkl)
return F0 * 2, Fhkl * diamondToFcc, Fhkl_ * diamondToFcc.conjugate()
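# Illustrative usage sketch (not part of the original module): the diamond
# structure factor vanishes for reflections with h + k + l = 4n + 2, e.g.
# (2, 0, 0); the d-spacing 1.78 A is an assumed example value.
# >>> from xrt.backends.raycing import materials as rm
# >>> c200 = rm.CrystalDiamond((2, 0, 0), 1.78, elements='C')
# >>> F0, Fhkl, Fhkl_ = c200.get_structure_factor(9000., 0.5/c200.d)
# >>> abs(Fhkl)        # ~0: forbidden reflection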
class CrystalSi(CrystalDiamond):
"""
A derivative class from :class:`CrystalDiamond` that defines the crystal
d-spacing as a function of temperature.
"""
def __init__(self, *args, **kwargs):
"""
*tK*: float
Temperature in Kelvin.
*hkl*: sequence
hkl indices.
"""
self.tK = kwargs.pop('tK', 297.15)
self.hkl = kwargs.get('hkl', (1, 1, 1))
# O'Mara, William C. Handbook of Semiconductor Silicon Technology.
# William Andrew Inc. (1990) pp. 349–352.
self.a0 = 5.430710
self.dl_l0 = self.dl_l(273.15 + 19.9)
self.sqrthkl2 = (sum(i**2 for i in self.hkl))**0.5
kwargs['d'] = self.get_a() / self.sqrthkl2
kwargs['elements'] = 'Si'
kwargs['hkl'] = self.hkl
# Mechanics of Materials, 23 (1996), p.314
kwargs['nuPoisson'] = 0.22
super(CrystalSi, self).__init__(*args, **kwargs)
def dl_l(self, t=None):
"""Calculates the crystal elongation at temperature *t*. Uses the
        parameterization from [Swenson]_. Less than 1% error; the reference
        temperature is 19.9 °C; the result is the dimensionless relative
        elongation Δl/l; *t* must be in kelvins.
.. [Swenson] C.A. Swenson, J. Phys. Chem. Ref. Data **12** (1983) 179
"""
if t is None:
t = self.tK
if t >= 0.0 and t < 30.0:
return -2.154537e-004
elif t >= 30.0 and t < 130.0:
return -2.303956e-014 * t**4 + 7.834799e-011 * t**3 - \
1.724143e-008 * t**2 + 8.396104e-007 * t - 2.276144e-004
elif t >= 130.0 and t < 293.0:
return -1.223001e-011 * t**3 + 1.532991e-008 * t**2 - \
3.263667e-006 * t - 5.217231e-005
elif t >= 293.0 and t <= 1000.0:
return -1.161022e-012 * t**3 + 3.311476e-009 * t**2 + \
1.124129e-006 * t - 5.844535e-004
else:
return 1.0e+100
def get_a(self):
"""Gives the lattice parameter."""
return self.a0 * (self.dl_l()-self.dl_l0+1)
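    # Illustrative usage sketch (not part of the original module): the lattice
    # parameter and the (111) d-spacing at two temperatures.
    # >>> from xrt.backends.raycing import materials as rm
    # >>> rm.CrystalSi(tK=297.15).get_a()    # lattice parameter in A
    # >>> rm.CrystalSi(tK=400.).get_a()      # slightly larger at 400 K
    # >>> rm.CrystalSi(tK=400.).d            # corresponding d(111)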
def get_Bragg_offset(self, E, Eref):
"""Calculates the Bragg angle offset due to a mechanical (mounting)
misalignment.
*E* is the calculated energy of a spectrum feature, typically the edge
position.
*Eref* is the tabulated position of the same feature."""
self.d = self.get_a() / self.sqrthkl2
chOverTwod = CH / 2 / self.d
return np.arcsin(chOverTwod/E) - np.arcsin(chOverTwod/Eref)
class CrystalFromCell(Crystal):
""":class:`CrystalFromCell` builds a crystal from cell parameters and
atomic positions which can be found e.g. in Crystals.dat of XOP [XOP]_ or
xraylib.
Examples:
>>> xtalQu = rm.CrystalFromCell(
>>> 'alphaQuartz', (1, 0, 2), a=4.91304, c=5.40463, gamma=120,
>>> atoms=[14]*3 + [8]*6,
>>> atomsXYZ=[[0.4697, 0., 0.],
>>> [-0.4697, -0.4697, 1./3],
>>> [0., 0.4697, 2./3],
>>> [0.4125, 0.2662, 0.1188],
>>> [-0.1463, -0.4125, 0.4521],
>>> [-0.2662, 0.1463, -0.2145],
>>> [0.1463, -0.2662, -0.1188],
>>> [-0.4125, -0.1463, 0.2145],
>>> [0.2662, 0.4125, 0.5479]])
>>>
>>> xtalGr = rm.CrystalFromCell(
>>> 'graphite', (0, 0, 2), a=2.456, c=6.696, gamma=120,
>>> atoms=[6]*4, atomsXYZ=[[0., 0., 0.], [0., 0., 0.5],
>>> [1./3, 2./3, 0.], [2./3, 1./3, 0.5]])
>>>
>>> xtalBe = rm.CrystalFromCell(
>>> 'Be', (0, 0, 2), a=2.287, c=3.583, gamma=120,
>>> atoms=[4]*2, atomsXYZ=[[1./3, 2./3, 0.25], [2./3, 1./3, 0.75]])
"""
def __init__(self, name='', hkl=[1, 1, 1],
a=5.430710, b=None, c=None, alpha=90, beta=90, gamma=90,
atoms=[14]*8,
atomsXYZ=[[0., 0., 0.],
[0., 0.5, 0.5],
[0.5, 0.5, 0.],
[0.5, 0., 0.5],
[0.25, 0.25, 0.25],
[0.25, 0.75, 0.75],
[0.75, 0.25, 0.75],
[0.75, 0.75, 0.25]],
atomsFraction=None, tK=0,
t=None, factDW=1.,
geom='Bragg reflected', table='Chantler total',
nuPoisson=0., calcBorrmann=None, useTT=False, mosaicity=0):
u"""
*name*: str
Crystal name. Not used by xrt.
*hkl*: sequence
hkl indices.
*a*, *b*, *c*: float
Cell parameters in Å. *a* must be given. *b*, *c*, if not given,
            are set equal to *a*.
*alpha*, *beta*, *gamma*: float
Cell angles in degrees. If not given, are equal to 90.
*atoms*: list of str or list of int
List of atoms in the cell given by element Z's or element names.
*atomsXYZ*: list of 3-sequences
List of atomic coordinates in cell units.
.. note::
*atoms* and *atomsXYZ* must contain *all* the atoms, not only
the unique ones for a given symmetry group (we do not consider
symmetry here). For example, the unit cell of magnetite (Fe3O4)
has 3 unique atomic positions and 56 in total; here, all 56 are
needed.
*atomsFraction*: a list of float or None
Atomic fractions. If None, all values are 1.
*nuPoisson*: float
Poisson's ratio. Used to calculate the properties of bent crystals.
*calcBorrmann*: str
Controls the origin of the ray leaving the crystal. Can be 'None',
'uniform', 'Bessel' or 'TT'. If 'None', the point of reflection
is located on the surface of incidence. In all other cases the
coordinate of the exit point is sampled according to the
corresponding distribution: 'uniform' is a fast approximation for
            thick crystals, 'Bessel' is the exact solution for flat crystals,
            'TT' is the exact solution of the Takagi-Taupin equations for bent
            and flat crystals ('TT' requires *targetOpenCL* in the Optical
            Element to be
not 'None' and *useTT* in the :class:`Crystal` to be 'True'. Not
recommended for crystals thicker than 100 µm due to heavy
computational load).
*useTT*: bool
Specifies whether the reflectivity will by calculated by analytical
formula or by solution of the Takagi-Taupin equations (so far only
for the Laue geometry). Must be set to 'True' in order to calculate
the reflectivity of bent crystals.
"""
self.name = name
self.hkl = hkl
h, k, l = hkl
        self.tK = tK
self.a = a
self.b = a if b is None else b
self.c = a if c is None else c
self.alpha = np.radians(alpha)
self.beta = np.radians(beta)
self.gamma = np.radians(gamma)
self.atoms = atoms
self.elements = []
self.atomsXYZ = atomsXYZ
uniqueElements = {}
for atom in atoms:
if atom in uniqueElements:
element = uniqueElements[atom]
else:
element = Element(atom, table)
uniqueElements[atom] = element
self.elements.append(element)
self.atomsFraction =\
[1 for atom in atoms] if atomsFraction is None else atomsFraction
self.quantities = self.atomsFraction
ca, cb, cg = np.cos((self.alpha, self.beta, self.gamma))
sa, sb, sg = np.sin((self.alpha, self.beta, self.gamma))
self.V = self.a * self.b * self.c *\
(1 - ca**2 - cb**2 - cg**2 + 2*ca*cb*cg)**0.5
self.mass = 0.
        for el, xi in zip(self.elements, self.atomsFraction):
            self.mass += xi * el.mass  # each element's atomic mass weighted by its occupancy
self.rho = self.mass / AVOGADRO / self.V * 1e24
self.d = self.V / (self.a * self.b * self.c) *\
((h*sa/self.a)**2 + (k*sb/self.b)**2 + (l*sg/self.c)**2 +
2*h*k * (ca*cb - cg) / (self.a*self.b) +
2*h*l * (ca*cg - cb) / (self.a*self.c) +
2*k*l * (cb*cg - ca) / (self.b*self.c))**(-0.5)
self.chiToF = -R0 / PI / self.V # minus!
self.chiToFd2 = abs(self.chiToF) * self.d**2
self.geom = geom
self.geometry = 2*int(geom.startswith('Bragg')) +\
int(geom.endswith('transmitted'))
self.factDW = factDW
self.kind = 'crystal'
self.t = t # in mm
self.nuPoisson = nuPoisson
self.calcBorrmann = calcBorrmann
self.useTT = useTT
self.mosaicity = mosaicity
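    # Worked example (added note, not part of the original module): for silicon
    # a = b = c = 5.430710 Å and alpha = beta = gamma = 90 deg, so the general
    # triclinic d-spacing expression above reduces to d = a / sqrt(h**2 + k**2 + l**2);
    # for the (111) reflection d = 5.430710 / sqrt(3) ≈ 3.1356 Å.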
def get_structure_factor(self, E, sinThetaOverLambda=0, needFhkl=True):
F0, Fhkl, Fhkl_ = 0, 0, 0
uniqueElements = {}
for el, xyz, af in zip(
self.elements, self.atomsXYZ, self.atomsFraction):
if el.Z in uniqueElements:
f0, anomalousPart = uniqueElements[el.Z]
else:
f0 = el.get_f0(sinThetaOverLambda) if needFhkl else 0
anomalousPart = el.get_f1f2(E)
uniqueElements[el.Z] = f0, anomalousPart
F0 += af * (el.Z+anomalousPart) * self.factDW
fact = af * (f0+anomalousPart) * self.factDW
expiHr = np.exp(2j * np.pi * np.dot(xyz, self.hkl))
Fhkl += fact * expiHr
Fhkl_ += fact / expiHr
return F0, Fhkl, Fhkl_
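# Hedged usage sketch (added note, not part of the original module):
#     xtalSi = CrystalFromCell()  # the default arguments describe the Si cell
#     F0, Fhkl, Fhkl_ = xtalSi.get_structure_factor(E=9000., sinThetaOverLambda=0.16)
# F0 sums Z + f' + if'' over the cell, while Fhkl and Fhkl_ weight each atom's
# (f0 + f' + if'') by exp(+-2*pi*i*(h*x + k*y + l*z)) for the hkl given to the
# constructor; the chiToF factor defined in __init__ converts them to susceptibilities.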
class Powder(CrystalFromCell):
u"""
A derivative class from :class:`CrystalFromCell` with randomly distributed
atomic plane orientations similar to the real polycrystalline powders. The
    distribution is uniform over the sphere: the longitudinal and transverse
    deflection angles (θ and χ) are obtained from variables μ and ν sampled
    uniformly over [0, 1) as θ = arccos(μ), χ = 2πν.
The class parameter *hkl* defines the highest reflex, so that
reflectivities are calculated for all possible combinations of indices
[mnp], where 0 ≤ m ≤ h, 0 ≤ n ≤ k, 0 ≤ p ≤ l. Only one reflection with the
highest amplitude is picked for each incident ray.
.. warning::
Heavy computational load. Requires OpenCL.
"""
def __init__(self, *args, **kwargs):
u"""
*chi*: 2-list of floats [min, max]
Limits of the χ angle distribution. Zero and π/2 angles correspond
to the positive directions of *x* and *z* axes.
"""
kwargs = self.__pop_kwargs(**kwargs)
CrystalFromCell.__init__(self, *args, **kwargs)
self.kind = 'powder'
def __pop_kwargs(self, **kwargs):
self.chi = kwargs.pop('chi', [0, 0.5*np.pi])
return kwargs
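# Added illustration (hedged sketch, not part of the original module): the
# uniform orientation sampling described in the Powder docstring, written out
# with a hypothetical helper name.
def _sample_powder_orientations(nrays, chi=(0., 0.5*np.pi)):
    """Return (theta, chi) deflection angles sampled as described above."""
    mu = np.random.uniform(0., 1., nrays)
    nu = np.random.uniform(0., 1., nrays)
    theta = np.arccos(mu)                    # theta = arccos(mu)
    chis = chi[0] + (chi[1] - chi[0]) * nu   # chi spans the requested range
    return theta, chis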
class CrystalHarmonics(CrystalFromCell):
u"""
A derivative class from :class:`CrystalFromCell`, used to calculate
multiple orders of the given reflex in one run: n*[hkl], where 1 ≤ n ≤ Nmax
    i.e. [111], [222], [333] or [220], [440], [660]. Only the harmonic with the
    highest reflectivity is picked for each incident ray. Use this class to
estimate the efficiency of higher harmonic rejection schemes.
.. warning::
Heavy computational load. Requires OpenCL.
"""
def __init__(self, *args, **kwargs):
u"""
*Nmax*: int
Specifies the highest order of reflection to be calculated.
"""
kwargs = self.__pop_kwargs(**kwargs)
CrystalFromCell.__init__(self, *args, **kwargs)
self.kind = 'crystal harmonics'
def __pop_kwargs(self, **kwargs):
self.Nmax = kwargs.pop('Nmax', 3)
return kwargs
class MonoCrystal(CrystalFromCell):
u"""
A derivative class from :class:`CrystalFromCell`, used for calculation of
    the single crystal diffraction patterns (so far cubic symmetries only).
Similar to the parent class, parameter *hkl* defines the cut orientation,
    whereas *Nmax* stands for the highest index to consider, i.e. for every ray
    the code calculates the full range of reflexes from [-Nmax, -Nmax, -Nmax]
    to [Nmax, Nmax, Nmax] (hence 2*(2*Nmax+1)^3 reflectivity calculations per
    ray), but returns only one of them, picked with a probability proportional
    to its intensity, so brighter reflexes are selected more often.
.. warning::
Heavy computational load. Requires OpenCL. Decent GPU highly
recommended.
"""
def __init__(self, *args, **kwargs):
u"""
*Nmax*: int
Specifies the highest order of reflection to be calculated.
"""
kwargs = self.__pop_kwargs(**kwargs)
CrystalFromCell.__init__(self, *args, **kwargs)
self.kind = 'monocrystal'
def __pop_kwargs(self, **kwargs):
self.Nmax = kwargs.pop('Nmax', 3)
return kwargs
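# Added note (not part of the original module): with the default Nmax = 3 the
# MonoCrystal search space is 2*(2*3 + 1)**3 = 2*343 = 686 reflectivity
# evaluations per incident ray, which is why a decent GPU is recommended above.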
| 40.378014 | 108 | 0.541519 |
| 3e0675eae4580b43bb6dedbe0b732ca983f0e7c6 | 1541 | py | Python | templogboss/myapp/migrations/0001_initial.py | Bossabossy/Templog | 1d653f9a1c23d189940950bce808a3e26d618e15 | ["BSD-2-Clause"] | null | null | null | templogboss/myapp/migrations/0001_initial.py | Bossabossy/Templog | 1d653f9a1c23d189940950bce808a3e26d618e15 | ["BSD-2-Clause"] | null | null | null | templogboss/myapp/migrations/0001_initial.py | Bossabossy/Templog | 1d653f9a1c23d189940950bce808a3e26d618e15 | ["BSD-2-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-19 04:29
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Attribute',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('attribute', models.CharField(max_length=100)),
('description', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Node',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('node_name', models.CharField(max_length=100)),
('description', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Record',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('description', models.CharField(max_length=100)),
('node', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='myapp.Node')),
],
),
]
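# Added note (hedged sketch, not part of the generated migration): the models.py
# that would produce this initial migration looks roughly like the following;
# field names and types are read directly off the CreateModel operations above.
#
#     class Attribute(models.Model):
#         attribute = models.CharField(max_length=100)
#         description = models.CharField(max_length=100)
#
#     class Node(models.Model):
#         node_name = models.CharField(max_length=100)
#         description = models.CharField(max_length=100)
#
#     class Record(models.Model):
#         created = models.DateTimeField(auto_now_add=True)
#         updated = models.DateTimeField(auto_now=True)
#         description = models.CharField(max_length=100)
#         node = models.ForeignKey(Node, on_delete=models.CASCADE)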
| 35.022727 | 114 | 0.576898 |
| bb47ffa2da441ef000bf149e60f6b3bcce0b1574 | 5451 | py | Python | python/ccxt/async/fybse.py | morgwn-shaw/bttb | a0e8dac53f233f747ad1c50c13a1d4b2d0ca14a5 | ["MIT"] | 3 | 2017-11-19T22:08:29.000Z | 2018-02-21T11:14:41.000Z | python/ccxt/async/fybse.py | morgwn-shaw/bttb | a0e8dac53f233f747ad1c50c13a1d4b2d0ca14a5 | ["MIT"] | null | null | null | python/ccxt/async/fybse.py | morgwn-shaw/bttb | a0e8dac53f233f747ad1c50c13a1d4b2d0ca14a5 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
from ccxt.async.base.exchange import Exchange
import hashlib
from ccxt.base.errors import ExchangeError
class fybse (Exchange):
def describe(self):
return self.deep_extend(super(fybse, self).describe(), {
'id': 'fybse',
'name': 'FYB-SE',
'countries': 'SE', # Sweden
'hasCORS': False,
'rateLimit': 1500,
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766512-31019772-5edb-11e7-8241-2e675e6797f1.jpg',
'api': 'https://www.fybse.se/api/SEK',
'www': 'https://www.fybse.se',
'doc': 'http://docs.fyb.apiary.io',
},
'api': {
'public': {
'get': [
'ticker',
'tickerdetailed',
'orderbook',
'trades',
],
},
'private': {
'post': [
'test',
'getaccinfo',
'getpendingorders',
'getorderhistory',
'cancelpendingorder',
'placeorder',
'withdraw',
],
},
},
'markets': {
'BTC/SEK': {'id': 'SEK', 'symbol': 'BTC/SEK', 'base': 'BTC', 'quote': 'SEK'},
},
})
async def fetch_balance(self, params={}):
balance = await self.privatePostGetaccinfo()
btc = float(balance['btcBal'])
symbol = self.symbols[0]
quote = self.markets[symbol]['quote']
lowercase = quote.lower() + 'Bal'
fiat = float(balance[lowercase])
crypto = {
'free': btc,
'used': 0.0,
'total': btc,
}
result = {'BTC': crypto}
result[quote] = {
'free': fiat,
'used': 0.0,
'total': fiat,
}
result['info'] = balance
return self.parse_balance(result)
async def fetch_order_book(self, symbol, params={}):
orderbook = await self.publicGetOrderbook(params)
return self.parse_order_book(orderbook)
async def fetch_ticker(self, symbol, params={}):
ticker = await self.publicGetTickerdetailed(params)
timestamp = self.milliseconds()
last = None
volume = None
if 'last' in ticker:
last = float(ticker['last'])
if 'vol' in ticker:
volume = float(ticker['vol'])
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': float(ticker['bid']),
'ask': float(ticker['ask']),
'vwap': None,
'open': None,
'close': None,
'first': None,
'last': last,
'change': None,
'percentage': None,
'average': None,
'baseVolume': volume,
'quoteVolume': None,
'info': ticker,
}
def parse_trade(self, trade, market):
timestamp = int(trade['date']) * 1000
return {
'info': trade,
'id': str(trade['tid']),
'order': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'type': None,
'side': None,
'price': float(trade['price']),
'amount': float(trade['amount']),
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
market = self.market(symbol)
response = await self.publicGetTrades(params)
return self.parse_trades(response, market)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
response = await self.privatePostPlaceorder(self.extend({
'qty': amount,
'price': price,
'type': side[0].upper()
}, params))
return {
'info': response,
'id': response['pending_oid'],
}
async def cancel_order(self, id, symbol=None, params={}):
return await self.privatePostCancelpendingorder({'orderNo': id})
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + path
if api == 'public':
url += '.json'
else:
nonce = self.nonce()
body = self.urlencode(self.extend({'timestamp': nonce}, params))
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'key': self.apiKey,
'sig': self.hmac(self.encode(body), self.encode(self.secret), hashlib.sha1)
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
async def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = await self.fetch2(path, api, method, params, headers, body)
if api == 'private':
if 'error' in response:
if response['error']:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
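# Added illustration (hedged, not part of ccxt): a standalone reproduction of the
# private-endpoint signing performed in fybse.sign() above -- the request params
# are urlencoded together with a 'timestamp' nonce and signed with HMAC-SHA1 of
# the API secret, hex digest going into the 'sig' header. Function name and the
# Python 3 urlencode import are assumptions of this sketch.
def _example_fyb_signature(api_key, secret, nonce, params=None):
    import hmac
    from urllib.parse import urlencode
    body = urlencode({'timestamp': nonce, **(params or {})})
    sig = hmac.new(secret.encode(), body.encode(), hashlib.sha1).hexdigest()
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'key': api_key,
        'sig': sig,
    }
    return body, headers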
| 34.283019 | 126 | 0.476059 |
| ddbc6b17f3fe667db8e7d26b09ea439ec8106500 | 889 | py | Python | warehouse/helpers.py | domenkozar/warehouse | 2c26400268acc76bf6b27c22c0469415738e9d22 | ["Apache-2.0"] | null | null | null | warehouse/helpers.py | domenkozar/warehouse | 2c26400268acc76bf6b27c22c0469415738e9d22 | ["Apache-2.0"] | null | null | null | warehouse/helpers.py | domenkozar/warehouse | 2c26400268acc76bf6b27c22c0469415738e9d22 | ["Apache-2.0"] | null | null | null |
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
def url_for(request, endpoint, **values):
force_external = values.pop("_force_external", False)
return request.url_adapter.build(
endpoint, values,
force_external=force_external,
)
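# Added note (hedged, not part of warehouse): request.url_adapter is expected to
# be a werkzeug-style MapAdapter, so a call such as
#     url_for(request, "some.endpoint", name="foo")   # endpoint name hypothetical
# delegates to MapAdapter.build() and returns a relative URL unless
# _force_external=True is passed among the values.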
| 37.041667 | 74 | 0.76378 |
| 97e56a3a2c9fd2bf51c75f36d5b5185d0e8859b4 | 7889 | py | Python | tests/test_deepflatten.py | Abhisheknishant/iteration_utilities | b2bf8d8668ed54d1aadf8c31884fc8a7d28551cc | ["Apache-2.0"] | 72 | 2016-09-12T03:01:02.000Z | 2022-03-05T16:54:45.000Z | tests/test_deepflatten.py | Abhisheknishant/iteration_utilities | b2bf8d8668ed54d1aadf8c31884fc8a7d28551cc | ["Apache-2.0"] | 127 | 2016-09-14T02:07:33.000Z | 2022-03-19T13:17:32.000Z | tests/test_deepflatten.py | Abhisheknishant/iteration_utilities | b2bf8d8668ed54d1aadf8c31884fc8a7d28551cc | ["Apache-2.0"] | 11 | 2017-02-22T20:40:37.000Z | 2022-03-05T16:55:40.000Z |
# Licensed under Apache License Version 2.0 - see LICENSE
import collections
import pickle
import pytest
from iteration_utilities import deepflatten
import helper_funcs as _hf
from helper_cls import T, toT
def test_deepflatten_empty1():
assert list(deepflatten([])) == []
def test_deepflatten_attributes1():
it = deepflatten([[T(1)], T(2)])
assert it.depth == -1
assert it.currentdepth == 0
assert it.ignore is None
assert it.types is None
assert next(it) == T(1)
assert it.currentdepth == 1
def test_deepflatten_normal1():
assert list(deepflatten([T(1), T(2), T(3)])) == [T(1), T(2), T(3)]
def test_deepflatten_normal2():
assert list(deepflatten([[T(1)], T(2), [[T(3)]]])) == toT([1, 2, 3])
def test_deepflatten_normal3():
# really deeply nested thingy
assert list(deepflatten([[[[[[[[[[[T(5), T(4), T(3), T(2), T(1), T(0)]]]]],
map(T, range(3))]]],
(T(i) for i in range(5))]]])
) == toT([5, 4, 3, 2, 1, 0, 0, 1, 2, 0, 1, 2, 3, 4])
def test_deepflatten_normal4():
# really deeply nested thingy with types
assert list(deepflatten([[[[[[[[[[[T(5), T(4), T(3), T(2), T(1), T(0)]]]]],
[T(0), T(1), T(2)]]]],
[T(0), T(1), T(2), T(3), T(4)]]]], types=list)
) == toT([5, 4, 3, 2, 1, 0, 0, 1, 2, 0, 1, 2, 3, 4])
def test_deepflatten_containing_strings1():
# no endless recursion even if we have strings in the iterable
assert list(deepflatten(["abc", "def"])) == ['a', 'b', 'c', 'd', 'e', 'f']
def test_deepflatten_containing_strings2():
    # no endless recursion even if the iterable contains strings and str was
    # passed as types
assert list(deepflatten(["abc", "def"],
types=str)) == ['a', 'b', 'c', 'd', 'e', 'f']
def test_deepflatten_containing_strings3():
# mixed with strings
assert list(deepflatten(["abc", ("def",), "g", [[{'h'}], 'i']],
)) == ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']
def test_deepflatten_depth1():
assert list(deepflatten([T(1), T(2), T(3)], 1)) == toT([1, 2, 3])
def test_deepflatten_depth2():
assert list(deepflatten([[T(1)], T(2), [[T(3)]]],
1)) == [T(1), T(2), [T(3)]]
def test_deepflatten_types1():
assert list(deepflatten([[T(1)], T(2), [[T(3)]]],
types=list)) == toT([1, 2, 3])
def test_deepflatten_types2():
assert list(deepflatten([[T(1)], T(2), [[T(3)]]],
types=tuple)) == [[T(1)], T(2), [[T(3)]]]
def test_deepflatten_types3():
assert list(deepflatten([[T(1)], T(2), ([T(3)], )],
types=(list, tuple))) == toT([1, 2, 3])
def test_deepflatten_ignore1():
assert list(deepflatten([[T(1)], T(2), [[T(3), 'abc']]],
ignore=str)) == [T(1), T(2), T(3), 'abc']
def test_deepflatten_ignore2():
assert list(deepflatten([[T(1)], T(2), ([T(3), 'abc'], )],
ignore=(tuple, str))
) == [T(1), T(2), ([T(3), 'abc'], )]
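# Added illustration (hedged, not one of the original tests): a compact summary
# of the depth/types/ignore semantics exercised above, using plain ints instead
# of the T wrapper; the underscore-prefixed name keeps it out of test collection.
def _deepflatten_semantics_sketch():
    # depth limits how many nesting levels are expanded
    assert list(deepflatten([[1], [[2]], 3], 1)) == [1, [2], 3]
    # types restricts flattening to the given container types
    assert list(deepflatten([[1], (2,), [[3]]], types=list)) == [1, (2,), 3]
    # ignore exempts the given container types from flattening
    assert list(deepflatten([[1], (2, [3])], ignore=tuple)) == [1, (2, [3])]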
def test_deepflatten_failure1():
with pytest.raises(TypeError):
list(deepflatten([T(1), T(2), T(3)], None, T('a')))
def test_deepflatten_failure2():
# recursively iterable data structures like strings that return another
# string in their iter.
with pytest.raises(RecursionError):
list(deepflatten([collections.UserString('abc')]))
def test_deepflatten_failure3():
# Test that a failing iterator doesn't raise a SystemError
with pytest.raises(_hf.FailNext.EXC_TYP, match=_hf.FailNext.EXC_MSG):
next(deepflatten(_hf.FailNext()))
def test_deepflatten_failure4():
# Test that a failing iterator doesn't raise a SystemError
with pytest.raises(_hf.FailNext.EXC_TYP, match=_hf.FailNext.EXC_MSG):
next(deepflatten([[_hf.FailNext()], 2]))
def test_deepflatten_failure5():
with pytest.raises(_hf.FailIter.EXC_TYP, match=_hf.FailIter.EXC_MSG):
deepflatten(_hf.FailIter())
def test_deepflatten_failure6():
# specified not iterable type as types
with pytest.raises(TypeError):
list(deepflatten([T(1), 2., T(3), T(4)], types=float))
def test_deepflatten_failure7():
# object that raises something else than TypeError when not iterable
with pytest.raises(_hf.FailIter.EXC_TYP, match=_hf.FailIter.EXC_MSG):
list(deepflatten([T(1), _hf.FailIter(), T(3), T(4)]))
def test_deepflatten_failure8():
# accessing iterator after exhausting the iterable
df = deepflatten(toT([1, 2, 3, 4]))
assert list(df) == toT([1, 2, 3, 4])
nothing = object()
assert next(df, nothing) is nothing
def test_deepflatten_failure9():
    # Check that everything works even if isinstance fails
df = deepflatten(toT([1, 2, 3, 4]), types=_hf.FailingIsinstanceClass)
with pytest.raises(_hf.FailingIsinstanceClass.EXC_TYP, match=_hf.FailingIsinstanceClass.EXC_MSG):
list(df)
def test_deepflatten_failure10():
    # Check that everything works even if isinstance fails
df = deepflatten(toT([1, 2, 3, 4]), ignore=_hf.FailingIsinstanceClass)
with pytest.raises(_hf.FailingIsinstanceClass.EXC_TYP, match=_hf.FailingIsinstanceClass.EXC_MSG):
list(df)
def test_deepflatten_copy1():
_hf.iterator_copy(deepflatten(toT([1, 2, 3, 4])))
def test_deepflatten_failure_setstate1():
# using __setstate__ to pass in an invalid iteratorlist
df = deepflatten(toT([1, 2, 3, 4]))
with pytest.raises(TypeError):
df.__setstate__(({'a', 'b', 'c'}, 0, 0))
def test_deepflatten_failure_setstate2():
# using __setstate__ to pass in an invalid iteratorlist (not iterator
# inside)
df = deepflatten(toT([1, 2, 3, 4]))
with pytest.raises(TypeError):
df.__setstate__(([set(toT([1, 2, 3, 4]))], 0, 0))
def test_deepflatten_failure_setstate3():
# using __setstate__ to pass in an invalid currentdepth (too low)
df = deepflatten(toT([1, 2, 3, 4]))
with pytest.raises(ValueError):
df.__setstate__(([iter(toT([1, 2, 3, 4]))], -3, 0))
def test_deepflatten_failure_setstate4():
# using __setstate__ to pass in an invalid currentdepth (too high)
df = deepflatten(toT([1, 2, 3, 4]))
with pytest.raises(ValueError):
df.__setstate__(([iter(toT([1, 2, 3, 4]))], 5, 0))
def test_deepflatten_failure_setstate5():
_hf.iterator_setstate_list_fail(deepflatten(toT([1, 2, 3, 4])))
def test_deepflatten_failure_setstate6():
_hf.iterator_setstate_empty_fail(deepflatten(toT([1, 2, 3, 4])))
def test_deepflatten_reduce1():
# Earlier we were able to modify the iteratorlist (including deleting
# parts of it). That could lead to segmentation faults.
df = deepflatten(toT([1, 2, 3, 4, 5, 6]))
next(df)
# Clear the iteratorlist from all items.
df.__reduce__()[2][0][:] = []
next(df)
def test_deepflatten_setstate1():
# We could keep a reference to the iteratorlist passed to setstate and
# mutate it (leading to incorrect behavior and segfaults).
df = deepflatten(toT([1, 2, 3, 4, 5, 6]))
next(df)
# Easiest way is to roundtrip the state but keep the state as variable so
# we can modify it!
state = df.__reduce__()[2]
df.__setstate__(state)
state[0][:] = []
next(df)
def test_deepflatten_pickle1(protocol):
dpflt = deepflatten([[T(1)], [T(2)], [T(3)], [T(4)]])
assert next(dpflt) == T(1)
x = pickle.dumps(dpflt, protocol=protocol)
assert list(pickle.loads(x)) == toT([2, 3, 4])
def test_deepflatten_pickle2(protocol):
dpflt = deepflatten([['abc', T(1)], [T(2)], [T(3)], [T(4)]])
assert next(dpflt) == 'a'
x = pickle.dumps(dpflt, protocol=protocol)
assert list(pickle.loads(x)) == ['b', 'c'] + toT([1, 2, 3, 4])
| 32.069106 | 101 | 0.608822 |
| 93b922f12b743218239d5fc33c2b69909778c190 | 1348 | py | Python | rail/creation/degradation/degrader.py | yanzastro/RAIL | 588ad550b21bcd60a8c33243f720ee3729f6ab7e | ["MIT"] | null | null | null | rail/creation/degradation/degrader.py | yanzastro/RAIL | 588ad550b21bcd60a8c33243f720ee3729f6ab7e | ["MIT"] | null | null | null | rail/creation/degradation/degrader.py | yanzastro/RAIL | 588ad550b21bcd60a8c33243f720ee3729f6ab7e | ["MIT"] | null | null | null |
""" Abstract base class defining a degrader
The key feature is that the __call__ method takes a pandas DataFrame and a
seed, and returns a pandas DataFrame, and wraps the run method
"""
import pandas as pd
from rail.core.stage import RailStage
from rail.core.data import PqHandle
class Degrader(RailStage):
"""Base class Degraders, which apply various degradations to synthetic photometric data"""
name = 'Degrader'
config_options = RailStage.config_options.copy()
config_options.update(seed=12345)
inputs = [('input', PqHandle)]
outputs = [('output', PqHandle)]
def __init__(self, args, comm=None):
"""Initialize Degrader that can degrade photometric data"""
RailStage.__init__(self, args, comm=comm)
def __call__(self, sample: pd.DataFrame, seed: int = None) -> pd.DataFrame:
"""Return a degraded sample.
Parameters
----------
sample : pd.DataFrame
The sample to be degraded
seed : int, default=None
An integer to set the numpy random seed
Returns
-------
pd.DataFrame
The degraded sample
"""
if seed is not None:
self.config.seed = seed
self.set_data('input', sample)
self.run()
self.finalize()
return self.get_handle('output')
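# Added note (hedged sketch, not part of RAIL): concrete degraders subclass this
# base class and implement run(); assuming RailStage exposes get_data/add_data as
# in other RAIL stages, a minimal pass-through degrader would look roughly like
#
#     class IdentityDegrader(Degrader):
#         name = 'IdentityDegrader'
#         def run(self):
#             self.add_data('output', self.get_data('input'))
#
# after which calling an instance on a DataFrame follows the __call__ flow above.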
| 29.304348 | 94 | 0.635015 |
| 5d99b4faf918fcee807753fb3c4f5598f6aa3152 | 6625 | py | Python | dynaconf/vendor/ruamel/yaml/dumper.py | rroden12/dynaconf | 9495fdd2145dfc93a9af700c104f7841e52221b2 | ["MIT"] | 5 | 2020-10-01T00:43:32.000Z | 2021-10-30T16:15:27.000Z | dynaconf/vendor/ruamel/yaml/dumper.py | rroden12/dynaconf | 9495fdd2145dfc93a9af700c104f7841e52221b2 | ["MIT"] | 61 | 2021-01-10T12:59:01.000Z | 2021-06-24T09:19:20.000Z | dynaconf/vendor/ruamel/yaml/dumper.py | rroden12/dynaconf | 9495fdd2145dfc93a9af700c104f7841e52221b2 | ["MIT"] | 6 | 2020-10-05T00:53:35.000Z | 2022-02-23T20:18:47.000Z |
# coding: utf-8
from __future__ import absolute_import
from ruamel.yaml.emitter import Emitter
from ruamel.yaml.serializer import Serializer
from ruamel.yaml.representer import (
Representer,
SafeRepresenter,
BaseRepresenter,
RoundTripRepresenter,
)
from ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver
if False: # MYPY
from typing import Any, Dict, List, Union, Optional # NOQA
from ruamel.yaml.compat import StreamType, VersionType # NOQA
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (Any, StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
BaseRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
BaseResolver.__init__(self, loadumper=self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
SafeRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
Resolver.__init__(self, loadumper=self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
Representer.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
Resolver.__init__(self, loadumper=self)
class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Optional[bool], Optional[int], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
top_level_colon_align=top_level_colon_align,
prefix_colon=prefix_colon,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
RoundTripRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
VersionedResolver.__init__(self, loader=self)
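# Added note (hedged, not part of ruamel.yaml): these Dumper classes are normally
# selected through the top-level dump entry points rather than instantiated
# directly, e.g. something along the lines of
#
#     import ruamel.yaml
#     ruamel.yaml.dump(data, stream, Dumper=ruamel.yaml.RoundTripDumper)
#
# (old-style API); each subclass only differs in which Representer/Resolver pair
# it mixes into the shared Emitter/Serializer plumbing shown above.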
| 29.842342 | 193 | 0.581585 |
| 96534abf0c5c34ac22cf5f5b0d60caa1123c716c | 796 | py | Python | src/command_modules/azure-cli-lab/azure/cli/command_modules/lab/sdk/devtestlabs/models/artifact_paged.py | saurabsa/azure-cli-old | f77477a98c9aa9cb55daf5b0d2f410d1455a9225 | ["MIT"] | null | null | null | src/command_modules/azure-cli-lab/azure/cli/command_modules/lab/sdk/devtestlabs/models/artifact_paged.py | saurabsa/azure-cli-old | f77477a98c9aa9cb55daf5b0d2f410d1455a9225 | ["MIT"] | 2 | 2021-03-25T21:38:56.000Z | 2021-11-15T17:46:45.000Z | src/command_modules/azure-cli-lab/azure/cli/command_modules/lab/sdk/devtestlabs/models/artifact_paged.py | Visual-Studio-China/azure-cli-int | 48c7c7f371a0ecc4ebfd4dcfdc72764beddf5c31 | ["MIT"] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# coding: utf-8
# pylint: skip-file
from msrest.paging import Paged
class ArtifactPaged(Paged):
"""
    A paging container for iterating over a list of Artifact objects
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[Artifact]'}
}
def __init__(self, *args, **kwargs):
super(ArtifactPaged, self).__init__(*args, **kwargs)
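# Added note (hedged, not part of the SDK): msrest.paging.Paged implements the
# iterator protocol, so client code typically consumes this container as
#
#     for artifact in artifact_paged:   # `artifact_paged` returned by a list call
#         print(artifact)
#
# with `next_link` followed transparently when the current page is exhausted.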
| 33.166667 | 94 | 0.5 |